def __call__(self):
    """Collect data, coordinate and calibration file paths for the strategy window.

    Walks the day directories ``<file_prefix>/YYYY/MM/DD`` from
    ``strategy_start`` through ``strategy_end`` (inclusive), matches file
    names against ``data_file_prefix``/``data_file_suffix`` and keeps only
    files whose embedded date lies inside the window and on that day.
    Results are stored on the context:

    * ``ctx.data_file_paths`` -- list of per-day lists of data file paths
    * ``ctx.coords_paths`` -- mapping of formatted date -> coordinates path
    * ``ctx.calibrations_paths`` -- ordered mapping of formatted date ->
      ``(calibration path, per-day data file paths)``

    :raises AssertionError: if no data or calibration files are found.
    """
    strategy_start = parse_datetime(self.ctx.params.strategy_start)
    strategy_end = parse_datetime(self.ctx.params.strategy_end)
    # Raw string: "\w" in a plain literal is an invalid escape sequence
    # (SyntaxWarning from Python 3.12 onwards).
    pattern = r"^%s(?P<date>\w+)%s$" % (self.ctx.params.data_file_prefix,
                                        self.ctx.params.data_file_suffix)
    p = re.compile(pattern)
    date_format = self.ctx.params.file_date_format
    file_prefix = self.ctx.params.file_prefix
    prefix = self.ctx.params.coord_prefix
    data_file_paths = []
    coords_paths = {}
    calibrations_paths = OrderedDict()
    date = strategy_start
    DAY = timedelta(1)
    while strategy_start <= date <= strategy_end:
        path = os.path.join(file_prefix, "%04d" % date.year,
                            "%02d" % date.month, "%02d" % date.day)
        if not is_skiped(path, date, prefix):
            coords_paths[format_date(date)] = get_coords_path(path, date,
                                                              prefix)
            data_files_per_day = []
            for filename in sorted(os.listdir(path)):
                match = p.match(filename)
                if match is None:
                    continue
                file_date = parse_datetime(match.group("date"), date_format)
                # TODO: something smarter needs to be done here!
                if strategy_start <= file_date <= strategy_end:
                    # Keep only files that belong to this day's directory;
                    # in-window files from other days are silently dropped.
                    if date.date() == file_date.date():
                        data_files_per_day.append(os.path.join(path, filename))
                elif self.ctx.params.verbose:
                    print("skipping", os.path.join(path, filename))
            if data_files_per_day:
                if is_calibration_day(path, date):
                    calibrations_paths[format_date(date)] = (
                        get_calibration_path(path, date), data_files_per_day)
                else:
                    data_file_paths.append(data_files_per_day)
        date = date + DAY
    # Explicit raise instead of a bare assert: asserts are stripped under
    # "python -O", which would silently disable this sanity check.  The
    # exception type is kept so existing callers are unaffected.
    if not data_file_paths and not calibrations_paths:
        raise AssertionError("No files match requirements")
    if self.ctx.params.verbose:
        print("Total number of days found: ", len(data_file_paths))
        print("Total number of calibration days found: ",
              len(calibrations_paths))
    self.ctx.data_file_paths = data_file_paths
    self.ctx.coords_paths = coords_paths
    self.ctx.calibrations_paths = calibrations_paths
def mask_artefacts(self):
    """
    Mask artefacts.

    Reads the artefacts file (rows of ``date, start, end``, times given as
    ``HH:MM``) and, for each artefact dated on the strategy start day,
    masks the corresponding time-of-day columns of ``ctx.tod_vx``.

    :return: mask after specified artefacts are masked.
    """
    date = self.ctx.strategy_start
    # NOTE(review): dtype="S" normally yields byte strings -- confirm that
    # utils.load_file decodes them, otherwise the "%s" formatting below
    # would embed b'...' reprs into the timestamp. TODO confirm.
    artefacts = utils.load_file(self.ctx.params.artefacts_file, dtype="S", delimiter=", ")
    for artefact_date, start, end in artefacts:
        # Expand the "HH:MM" bounds to full timestamps covering whole minutes.
        artefact_start_date = parse_datetime("%s-%s:00"%(artefact_date, start))
        artefact_end_date = parse_datetime("%s-%s:59"%(artefact_date, end))
        # Only artefacts on the strategy start day apply here.
        if date.date() == artefact_start_date.date():
            # Convert to fractional hours; presumably ctx.time_axis is in
            # hours of day -- verify against where time_axis is built.
            start_date_in_h = artefact_start_date.hour + artefact_start_date.minute / 60 + artefact_start_date.second / 3600
            end_date_in_h = artefact_end_date.hour + artefact_end_date.minute / 60 + artefact_end_date.second / 3600
            idx0 = np.searchsorted(self.ctx.time_axis, start_date_in_h)
            idx1 = np.searchsorted(self.ctx.time_axis, end_date_in_h)
            # Mask every row over the affected time columns (end exclusive).
            self.ctx.tod_vx.mask[:, idx0:idx1] = True
def get_observation_start_from_hdf5(path):
    """
    Extracts the observation date

    :param path: path to the file
    :returns observation_start: datetime object with the date
    """
    # The file-name stem ends with a yyyymmdd_hhmmss timestamp (15 chars).
    stem = os.path.basename(path).split(".")[0]
    timestamp = stem[-15:]
    return parse_datetime(timestamp, "%Y%m%d_%H%M%S")
def mask_artefacts(self):
    """
    Mask artefacts.

    For every artefact listed in the artefacts file whose date matches the
    strategy start date, the matching time interval of ``ctx.tod_vx`` is
    masked out across all rows.

    :return: mask after specified artefacts are masked.
    """
    date = self.ctx.strategy_start
    # NOTE(review): with dtype="S" the loaded fields may be bytes, not str;
    # the "%s" interpolation below assumes str -- confirm in utils.load_file.
    artefacts = utils.load_file(self.ctx.params.artefacts_file, dtype="S", delimiter=", ")
    for artefact_date, start, end in artefacts:
        # Build inclusive second-resolution bounds from the "HH:MM" fields.
        artefact_start_date = parse_datetime("%s-%s:00" % (artefact_date, start))
        artefact_end_date = parse_datetime("%s-%s:59" % (artefact_date, end))
        # Skip artefacts that fall on a different day than strategy_start.
        if date.date() == artefact_start_date.date():
            # Fractional hours, to be comparable with ctx.time_axis
            # (presumably hours of day -- TODO confirm).
            start_date_in_h = artefact_start_date.hour + artefact_start_date.minute / 60 + artefact_start_date.second / 3600
            end_date_in_h = artefact_end_date.hour + artefact_end_date.minute / 60 + artefact_end_date.second / 3600
            idx0 = np.searchsorted(self.ctx.time_axis, start_date_in_h)
            idx1 = np.searchsorted(self.ctx.time_axis, end_date_in_h)
            # Mask the affected time columns for all rows.
            self.ctx.tod_vx.mask[:, idx0:idx1] = True
def get_observation_start_from_fits(path):
    """
    Extracts the observation date

    :param path: path to the file
    :returns observation_start: datetime object with the date
    """
    with fits.open(path, mode='readonly', memmap=False) as hdu:
        header = hdu[0].header
        # Drop the trailing sub-second suffix of TIME-OBS before parsing.
        timestamp = header["DATE-OBS"] + "-" + header["TIME-OBS"][:-4]
        observation_start = parse_datetime(timestamp, "%Y/%m/%d-%H:%M:%S")
        # Release the primary HDU's data buffer before the file is closed.
        del hdu[0].data
    return observation_start
def get_observation_start_from_fits(path):
    """
    Extracts the observation date

    :param path: path to the file
    :returns observation_start: datetime object with the date
    """
    with fits.open(path, mode='readonly', memmap=False) as hdu:
        primary = hdu[0]
        date_format = "%Y/%m/%d-%H:%M:%S"
        # Combine DATE-OBS and TIME-OBS; the last four characters of
        # TIME-OBS (presumably a sub-second suffix -- TODO confirm against
        # the instrument's header convention) are stripped.
        observation_start = primary.header["DATE-OBS"] + "-" + primary.header[
            "TIME-OBS"][:-4]
        observation_start = parse_datetime(observation_start, date_format)
        # Drop the reference to the primary HDU data before closing.
        del hdu[0].data
    return observation_start
def __call__(self):
    """Collect data, coordinate and calibration file paths for the strategy window.

    Iterates day by day over ``<file_prefix>/YYYY/MM/DD`` directories
    between ``strategy_start`` and ``strategy_end`` (inclusive), matches
    file names against the configured prefix/suffix pattern, and records:

    * ``ctx.data_file_paths`` -- per-day lists of matching data file paths
    * ``ctx.coords_paths`` -- formatted date -> coordinates path
    * ``ctx.calibrations_paths`` -- formatted date ->
      ``(calibration path, per-day data file paths)`` for calibration days

    :raises AssertionError: if no data or calibration files are found.
    """
    strategy_start = parse_datetime(self.ctx.params.strategy_start)
    strategy_end = parse_datetime(self.ctx.params.strategy_end)
    # File names look like <prefix><date><suffix>; the <date> group is
    # parsed with file_date_format below.
    pattern = "^%s(?P<date>\w+)%s$" % (self.ctx.params.data_file_prefix,
                                       self.ctx.params.data_file_suffix)
    p = re.compile(pattern)
    date_format = self.ctx.params.file_date_format
    file_prefix = self.ctx.params.file_prefix
    prefix = self.ctx.params.coord_prefix
    data_file_paths = []
    coords_paths = {}
    calibrations_paths = OrderedDict()
    date = strategy_start
    DAY = timedelta(1)
    while strategy_start <= date <= strategy_end:
        # Day directories are laid out as YYYY/MM/DD under file_prefix.
        path = os.path.join(file_prefix, "%04d" % date.year,
                            "%02d" % date.month, "%02d" % date.day)
        if not is_skiped(path, date, prefix):
            coords_paths[format_date(date)] = get_coords_path(
                path, date, prefix)
            data_files_per_day = []
            for filename in sorted(os.listdir(path)):
                match = p.match(filename)
                if match is not None:
                    file_date = parse_datetime(match.group("date"),
                                               date_format)
                    #TODO: something smarter needs to be done here!
                    if strategy_start <= file_date <= strategy_end:
                        # Keep only files whose embedded date is the same
                        # calendar day as this directory; in-window files
                        # from other days are dropped without notice.
                        if date.year == file_date.year and date.month == file_date.month and date.day == file_date.day:
                            data_files_per_day.append(
                                os.path.join(path, filename))
                    else:
                        if self.ctx.params.verbose:
                            print("skipping", os.path.join(path, filename))
            if len(data_files_per_day) > 0:
                # Calibration days are kept separate from science days.
                if is_calibration_day(path, date):
                    calibrations_paths[format_date(date)] = (
                        get_calibration_path(path, date), data_files_per_day)
                else:
                    data_file_paths.append(data_files_per_day)
        date = date + DAY
    assert len(data_file_paths) != 0 or len(
        calibrations_paths) != 0, "No files match requirements"
    if self.ctx.params.verbose:
        print("Total number of days found: ", len(data_file_paths))
        print("Total number of calibration days found: ",
              len(calibrations_paths))
    self.ctx.data_file_paths = data_file_paths
    self.ctx.coords_paths = coords_paths
    self.ctx.calibrations_paths = calibrations_paths
def _parse_source(source_entry, cal_date):
    """Build a CalibrationSource from one calibration-file entry.

    :param source_entry: sequence of four fields: a time string, two
        angles in degrees (presumably coordinates -- verify against the
        calibration file format), and a "key: name" string.
    :param cal_date: date string prefixed to the entry's time field.
    :returns: a CalibrationSource instance.
    """
    timestamp = parse_datetime(cal_date + '-' + source_entry[0])
    coord1 = np.radians(source_entry[1])
    coord2 = np.radians(source_entry[2])
    # Keep only the value part of the "key: name" field.
    label = source_entry[3].split(':')[1].strip()
    return CalibrationSource(timestamp, coord1, coord2, label)
def _parse_source(source_entry, cal_date):
    """Construct a CalibrationSource from a single calibration entry.

    :param source_entry: four fields: time-of-day string, two angles in
        degrees (converted to radians here), and a "key: name" string whose
        value part becomes the source label.
    :param cal_date: date string joined with the entry's time field before
        parsing.
    :returns: a CalibrationSource instance.
    """
    return CalibrationSource(parse_datetime(cal_date + '-' + source_entry[0]),
                             np.radians(source_entry[1]),
                             np.radians(source_entry[2]),
                             source_entry[3].split(':')[1].strip()
                             )