def get_star_or_catalog_name(star: StarDescription, suffix: str = "") -> StarUI:
    extradata = None
    if star.has_metadata("VSX"):
        catalog = star.get_metadata("VSX")
        catalog_name, separation = catalog.name, catalog.separation
        extradata = catalog.extradata
    elif star.has_metadata("SITE"):
        catalog = star.get_metadata("SITE")
        catalog_name, separation = catalog.our_name, catalog.separation
    else:
        catalog_name, separation = star.local_id, None
    filename_no_suff_no_ext = (
        f"{int(catalog_name):05}"
        if isinstance(catalog_name, (int, np.int64)) or catalog_name.isdigit()
        else f"{catalog_name}"
    )
    filename_no_ext = f"{filename_no_suff_no_ext}{suffix}"
    filename_raw_no_ext = filename_no_ext
    filename_no_ext = replace_spaces(replace_dots(filename_raw_no_ext))
    filename_raw_no_suff_no_ext = filename_no_suff_no_ext
    filename_no_suff_no_ext = replace_spaces(replace_dots(filename_raw_no_suff_no_ext))
    return StarUI(
        catalog_name,
        separation,
        extradata,
        filename_raw_no_ext,
        filename_no_ext,
        filename_raw_no_suff_no_ext,
        filename_no_suff_no_ext,
    )

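# Hedged usage sketch for the function above. It assumes the StarDescription/SkyCoord
# constructors shown elsewhere in this code base and that replace_dots/replace_spaces
# only rewrite dots and spaces; the star id 42 is made up.
#
#     >>> star = StarDescription(local_id=42, coords=SkyCoord(10.0, 11.0, unit="deg"))
#     >>> get_star_or_catalog_name(star, suffix="_phase").filename_no_ext
#     '00042_phase'
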
def determine_period_and_epoch(
    df: DataFrame, star: StarDescription, method=lombscargle_period_calculate
) -> Tuple[Period, str]:
    if star.has_metadata("SITE") and star.get_metadata("SITE").period is not None:
        return _preset_period(star)
    return method(df.copy(), star)

def get_star_names(star: StarDescription) -> List[str]:
    def unique_append(alist, new):
        if new not in alist:
            alist.append(new)

    names = []
    if star.has_metadata("VSX"):
        unique_append(names, star.get_metadata("VSX").name)
    if star.has_metadata("SITE"):
        unique_append(names, star.get_metadata("SITE").our_name)
    return names if len(names) > 0 else None

def _add_catalog_data_to_sd(sd: StarDescription, ucac4_sd: StarDescription, overwrite):
    """Add UCAC4 catalog data to a StarDescription if there is none yet, or if overwrite is True."""
    # parenthesised to match the docstring: require a UCAC4 match, then add the data
    # when it is missing or when overwriting
    if ucac4_sd is not None and (not sd.has_metadata("UCAC4") or overwrite):
        do_calibration.add_catalog_data_to_sd(
            sd,
            ucac4_sd.vmag,
            ucac4_sd.vmag_err,
            ucac4_sd.aavso_id,
            "UCAC4",
            ucac4_sd.coords,
            extradata=ucac4_sd.get_metadata("UCAC4").extradata,
        )

def get_random_star_descriptions(nr=10):
    result = []
    for idx in range(nr):
        result.append(
            StarDescription(local_id=idx, coords=SkyCoord(idx, idx, unit="deg"))
        )
    return result

def stardesc(self, id, ra, dec, vmag, e_vmag, obs):
    return StarDescription(
        local_id=id,
        coords=SkyCoord(ra, dec, unit="deg"),
        vmag=vmag,
        vmag_err=e_vmag,
        obs=obs,
    )

def get_vsx_in_field(star_descriptions, max_separation=0.01):
    logging.info("Get VSX in field star descriptions")
    vsx_catalog, vsx_dict = create_vsx_astropy_catalog()
    star_catalog = create_star_descriptions_catalog(star_descriptions)
    idx, d2d, d3d = match_coordinates_sky(vsx_catalog, star_catalog)
    result = []
    for index_vsx, entry in enumerate(d2d):
        if entry.value < max_separation:
            star_local_id = idx[index_vsx] + 1
            star_coords = star_catalog[star_local_id - 1]
            result_entry = StarDescription()
            result_entry.local_id = star_local_id
            result_entry.coords = star_coords
            _add_catalog_match_to_entry(
                "VSX", result_entry, vsx_dict, index_vsx, entry.value
            )
            result.append(result_entry)
    logging.info(
        "Found {} VSX stars in field: {}".format(
            len(result), [star.local_id for star in result]
        )
    )
    return result

def get_metadata_from_star(star: StarDescription, metadata_id: str, warnings: bool = False):
    result = star.get_metadata(metadata_id)
    if result is None and warnings:
        logging.warning(
            f"The metadata {metadata_id} for star {star.local_id} does not exist"
        )
    return result

def _add_vsx_metadata_to_star_description(
    catalog_name: str, star: StarDescription, vsx_dict, index_vsx, separation
):
    assert star.metadata is not None
    vsx_name = vsx_dict["extradata"][index_vsx]["Name"]
    star.aavso_id = vsx_name
    match = CatalogData(
        key=catalog_name,
        catalog_id=vsx_name,
        name=vsx_name,
        separation=separation,
        coords=SkyCoord(
            vsx_dict["ra_deg_np"][index_vsx],
            vsx_dict["dec_deg_np"][index_vsx],
            unit="deg",
        ),
        extradata=vsx_dict["extradata"][index_vsx],
    )
    star.metadata = match
    return match

def get_candidates(threshold_prob=0.5, check_flag=False):
    result = []
    df = get_upsilon_candidates_raw(threshold_prob, check_flag)
    if df is None:
        return result
    positions = reading.read_world_positions(settings.worldposdir)
    for index, row in df.iterrows():
        upsilon_match = UpsilonMatch(
            name_of_catalog="Upsilon",
            var_type=row["label"],
            probability=row["probability"],
            flag=row["flag"],
            period=row["period"],
        )
        result.append(
            StarDescription(
                local_id=index,
                match=upsilon_match,
                coords=SkyCoord(
                    positions[int(index)][0], positions[int(index)][1], unit="deg"
                ),
            )
        )
    return result

def filter_comparison_stars(
    star: StarDescription, comp_stars: ComparisonStars
) -> Tuple[ComparisonStars, ComparisonStars]:
    compstar_match: star_description.CompStarData = star.get_metadata("COMPSTARS")
    sd_ids = compstar_match.compstar_ids
    extra_compstar = comp_stars.get_filtered_comparison_stars([compstar_match.extra_id])
    # skip part of the filtering work if the lists are equal. Biggest set first,
    # otherwise the diff is always empty. The check star is still returned so the
    # result matches the annotated (ensemble, check star) tuple.
    if len(set(comp_stars.ids).difference(set(sd_ids))) == 0:
        return comp_stars, extra_compstar
    filtered_compstars = comp_stars.get_filtered_comparison_stars(sd_ids)
    logging.debug(
        f"get star compstars from catalog: {len(filtered_compstars.ids)}, {filtered_compstars.ids}"
    )
    return filtered_compstars, extra_compstar

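# Call pattern as used in read_vast_lightcurves further down (df, star and comp_stars
# come from that pipeline): the filtered ensemble feeds the photometry, while the
# single extra/check star ends up in the AAVSO report.
#
#     filtered_compstars, check_star = do_compstars.filter_comparison_stars(star, comp_stars)
#     df["realV"], df["realErr"] = do_compstars.calculate_ensemble_photometry(
#         df, filtered_compstars, do_compstars.weighted_value_ensemble_method
#     )
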
def get_star_descriptions(star_id_list=None):
    # positions maps filename -> [ra.deg, dec.deg]
    positions = reading.read_world_positions(settings.worldposdir)
    result = []
    plist = "all stars" if star_id_list is None else star_id_list
    logging.info(
        f"Reading star descriptions for: {plist} with size {len(init.star_list)}"
    )
    for key in positions:
        star_id = reading.filename_to_star(str(key))
        if star_id_list is None or star_id in star_id_list:
            result.append(
                StarDescription(
                    local_id=star_id,
                    coords=SkyCoord(positions[key][0], positions[key][1], unit="deg"),
                )
            )
    return result

def test_metadata(self):
    star = StarDescription(local_id=id, coords=SkyCoord(10, 11, unit="deg"))
    self.assertEqual({}, star.metadata)
    star.metadata = CompStarData([1])
    self.assertEqual(1, len(star.metadata))
    star.metadata = StarMetaData()
    self.assertEqual(2, len(star.metadata))
    try:
        star.set_metadata(StarMetaData(), True)
    except:
        return
    self.fail("Adding duplicate keys to the dictionary should fail")

def get_star_description_from_tuple(ucactuple: UcacTuple) -> StarDescription:
    startuple, zone, run_nr = ucactuple
    ra, dec = UCAC4.get_real_ra_dec(startuple.ra, startuple.spd)
    coords = SkyCoord(ra, dec, unit="deg")
    vmag = startuple.apass_mag_V / 1000
    vmag_err = abs(startuple.apass_mag_sigma_V / 100)
    aavso_id = UCAC4.zone_and_run_nr_to_name(zone, run_nr)
    sd = StarDescription(coords=coords, vmag=vmag, vmag_err=vmag_err, aavso_id=aavso_id)
    do_calibration.add_catalog_data_to_sd(
        sd,
        vmag,
        vmag_err,
        aavso_id,
        "UCAC4",
        coords,
        extradata=startuple._asdict(),  # the StarTuple part of the UcacTuple
    )
    return sd

def plot_phase_diagram(
    star: StarDescription,
    curve: DataFrame,
    fullphasedir,
    suffix="",
    period: Period = None,
    epoch: str = None,
    write_plot=True,
    filter_func=None,
):
    assert period is not None
    try:
        logging.debug(f"Starting plot phase diagram with {star} and {fullphasedir}")
        starui: utils.StarUI = utils.get_star_or_catalog_name(
            star, suffix=f"_phase{suffix}"
        )
        names = utils.get_star_names(star)
        catalog_title = (
            f"{names[0]}"
            if names is not None and names[0] is not star.local_id
            else ""
        )
        save_location = Path(fullphasedir, starui.filename_no_ext + ".png")
        upsilon_match = star.get_metadata("UPSILON")
        upsilon_text = (
            upsilon_match.get_upsilon_string() if upsilon_match is not None else ""
        )
        if curve is None:
            logging.info(f"Curve of star {star.local_id} is None")
            return
        t_np = curve["floatJD"]
        y_np = curve["realV"].to_numpy()
        dy_np = curve["realErr"].to_numpy()
        # Epoch centering: the epoch may arrive as a string, so convert it before
        # comparing it with the (numeric) JD column
        epoch_float = float(epoch) if epoch else None
        epoch_location = (
            len(t_np) + np.argmin(abs(t_np - epoch_float))
            if epoch_float is not None
            and np.min(t_np) <= epoch_float <= np.max(t_np)
            else None
        )
        t_np_zeroed = epoch_to_zero_time(epoch_float, t_np)
        # calculate the phase so that the epoch at t=0 corresponds to phase 0
        phased_t = np.mod(t_np_zeroed / period.period, 1)
        phased_lc = y_np[:]
        save_to_disk = False
        if save_to_disk:
            np.save(Path("./", "epoch_t_" + starui.filename_no_ext + ".txt"), t_np)
            np.save(Path("./", "epoch_y_" + starui.filename_no_ext + ".txt"), y_np)
            np.save(Path("./", "epoch_tz_" + starui.filename_no_ext + ".txt"), t_np_zeroed)
            np.save(Path("./", "epoch_pt_" + starui.filename_no_ext + ".txt"), phased_t)
            np.save(Path("./", "epoch_py_" + starui.filename_no_ext + ".txt"), phased_lc)
        if filter_func is not None:
            phased_t, phased_lc = filter_func(phased_t, phased_lc)
        # plot the folded curve twice, over [-1, 1), so the shape around phase 0 is visible
        phased_t_final = np.append(np.subtract(phased_t, 1), phased_t)
        phased_lc_final = np.append(phased_lc, phased_lc)
        # error values are clipped to +0.5 and -0.5
        phased_err = np.clip(np.append(dy_np, dy_np), -0.5, 0.5)
        plt_result = _plot_phase_diagram(
            phased_t_final,
            phased_lc_final,
            phased_err,
            epoch_location,
            write_plot,
            save_location,
            star,
            catalog_title,
            period,
            upsilon_text,
        )
        if not write_plot:
            return plt_result, t_np, y_np
        return save_location
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        import traceback

        traceback.print_exc()
        logging.error(message)
        logging.error(f"Error during plot phase: {star.local_id}")

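# Minimal numpy-only sketch of the phase folding used above, with made-up numbers:
# times are zeroed on the epoch, folded on the period, and the folded curve is then
# duplicated over [-1, 1) so the behaviour around phase 0 shows up in one plot.
#
#     >>> import numpy as np
#     >>> t_zeroed = np.array([0.0, 0.3, 1.1, 2.45])   # days since epoch
#     >>> phase = np.mod(t_zeroed / 1.0, 1)            # 1-day period -> ~[0.0, 0.3, 0.1, 0.45]
#     >>> doubled = np.append(phase - 1, phase)        # covers [-1, 1)
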
def _plot_lightcurve(
    star: StarDescription,
    curve: DataFrame,
    chartsdir,
    suffix="_light",
    jd_adjusting_func=None,
    xlabel="JD",
    plot_width=20,
    plot_height=16,
    plot_dpi=150,
    markersize=6,
    errorbars=True,
    rotate=False,
    write_plot=True,
):
    try:
        star_id = star.local_id
        logging.debug(f"Plotting lightcurve for {star_id}")
        starui: utils.StarUI = utils.get_star_or_catalog_name(star, suffix=suffix)
        save_location = Path(chartsdir, starui.filename_no_ext + ".png")
        start = timer()
        upsilon_match = (
            star.get_metadata("UPSILON") if star.has_metadata("UPSILON") else None
        )
        upsilon_text = (
            upsilon_match.get_upsilon_string() if upsilon_match is not None else ""
        )
        end = timer()
        logging.debug(f"timing upsilon stuff {end - start}")
        names = utils.get_star_names(star)
        catalog_title = (
            f"{names[0]}"
            if names is not None and names[0] is not star.local_id
            else ""
        )
        plot_title = f"{catalog_title}\nStar {star.local_id}"
        if curve is None:
            logging.info(f"Curve is None for star {star_id}")
            return
        # e.g. for a phase-locked curve
        if jd_adjusting_func is None:
            curve["realJD"] = curve["floatJD"]
        else:
            curve["realJD"] = jd_adjusting_func(curve["floatJD"])
        fig = plt.figure(
            figsize=(plot_width, plot_height),
            dpi=plot_dpi,
            facecolor="w",
            edgecolor="k",
        )
        if errorbars:
            plt.errorbar(
                curve["realJD"],
                curve["realV"],
                yerr=curve["realErr"],
                linestyle="none",
                marker="o",
                ecolor="gray",
                elinewidth=1,
                ms=markersize,
            )
        else:
            plt.plot(
                curve["realJD"],
                curve["realV"],
                linestyle="none",
                linewidth=1,
                marker="o",
                ms=markersize,
            )
        plt.xlabel(xlabel, labelpad=TITLE_PAD)
        curve_max = curve["realV"].max()
        curve_min = curve["realV"].min()
        jd_min = curve["realJD"].min()
        jd_max = curve["realJD"].max()
        plot_max = curve_max + 0.1
        plot_min = curve_min - 0.1
        plt.ylim(plot_min, plot_max)
        plt.xlim(jd_min, jd_max)
        # plt.xticks(range(math.floor(jd_min), math.ceil(jd_max)))
        # nop = lambda *a, **k: None
        # print("limits are here:", star_id, "min:", curve['realJD'].min(), "max", curve['realJD'].max(),
        #       "first 10", curve['realJD'][:10], "first 10 orig:", curve['floatJD'][:10], "len", len(curve['realJD']),
        #       "describe", curve['realJD'].describe()) \
        #     if curve['realJD'].max() == 0 else nop()
        plt.gca().invert_yaxis()
        plt.gca().xaxis.set_major_locator(MaxNLocator(integer=True))
        plt.title(plot_title, pad=TITLE_PAD)
        plt.ticklabel_format(useOffset=False, style="plain")
        if rotate:
            plt.xticks(rotation=25)
        plt.tight_layout()
        if write_plot:
            start = timer()
            fig.savefig(save_location)
            end = timer()
            logging.debug(f"timing saving fig {end - start}")
            plt.close(fig)
            plt.clf()
            return save_location
        return plt, curve["realJD"], curve["realV"]
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        import traceback

        traceback.print_exc()
        logging.error(message)
        logging.error(f"Error during plot lightcurve: {star.local_id}")

def vsx_filter(star: StarDescription):
    return star.get_catalog("VSX") is not None

def stardesc(id, ra, dec):
    return StarDescription(local_id=id, coords=SkyCoord(ra, dec, unit="deg"))

def set_lines(star: StarDescription):
    # store the number of non-empty lines in the star's data file as its observation count
    with open(star.path) as file:
        star.obs = sum(1 for line in file if line.rstrip())
    return star

def get_empty_star_descriptions(star_id_list=None):
    # returns a list of StarDescription objects with only the local_id filled in
    result = []
    for star in star_id_list:
        result.append(StarDescription(local_id=star))
    return result

def block(star: StarDescription, resultdir: str, images_prefix: str, explore: bool):
    try:
        is_vsx = star.has_metadata("VSX")
        starui: utils.StarUI = utils.get_star_or_catalog_name(star, suffix="_phase")
        txt_path = Path(
            Path(star.result["phase"]).parent,
            "txt",
            starui.filename_no_suff_no_ext + ".txt",
        )
        try:
            parsed_toml = toml.load(txt_path)
        except FileNotFoundError:
            logging.error(
                f"Could not load txt file with phase information from {txt_path}"
            )
            # parsed_toml stays unbound here; the resulting error is caught by the
            # outer handler below and reported as "Could not load"
        ucac4_name, ucac4_mag, ucac4_coords, ucac4_colors, ucac4_rgb = get_ucac4_info(
            star, parsed_toml
        )
        name = (
            f"\n{parsed_toml['our_name']}"
            if "our_name" in parsed_toml
            else f"OUR_NAME_{star.local_id}"
        )
        # get the period if it's present, and change -1 to None
        period = float(parsed_toml["period"]) if "period" in parsed_toml else -1
        display_period = f"{period:.6f}" if period > 0 else "None"
        var_type_raw = get_from_toml("var_type", parsed_toml, UNKNOWN)
        var_type = f"{var_type_raw}"
        phase_url = f"{images_prefix}{starui.filename_no_ext}.png"
        light_url = f"{images_prefix}{starui.filename_no_suff_no_ext}_light.png"
        if utils.is_var_type_aperiodic(var_type, period) or utils.is_check(var_type):
            main_url = light_url
            second_url = phase_url
        else:
            main_url = phase_url
            second_url = light_url
        epoch = f"{parsed_toml['epoch']}" if "epoch" in parsed_toml else UNKNOWN
        vsx_var_flag = (
            f" ({parsed_toml['vsx_var_flag']})" if "vsx_var_flag" in parsed_toml else ""
        )
        tomlseparation = parsed_toml["separation"] if "separation" in parsed_toml else None
        ucacseparation = (
            star.coords.separation(star.get_metadata("UCAC4").coords).degree
            if star.has_metadata("UCAC4")
            else None
        )
        realseparation = ucacseparation if ucacseparation is not None else tomlseparation
        separation = (
            f"<li>separation: +/- {realseparation * 3600:.0f} arcsec</li>"
            if realseparation is not None
            else ""
        )
        var_type_link = (
            f"<a href='https://www.aavso.org/vsx/index.php?view=help.vartype&nolayout=1&abbrev="
            f"{var_type}'>{var_type}</a>"
            if var_type != UNKNOWN
            else var_type
        )
        mag_range = f"{parsed_toml['range']}"
        minmax = (
            f"<li>minmax: {parsed_toml['minmax']}</li>"
            if "minmax" in parsed_toml
            else ""
        )
        vsx_link = (
            f'<li><a target="_blank" rel="noopener noreferrer" '
            f'href="https://www.aavso.org/vsx/index.php?view=detail.top&oid={starui.extradata["OID"]}"'
            f">VSX link</a></li>"
            if is_vsx
            else ""
        )
        points_removed = (
            f"<li>Outliers removed: {parsed_toml['points_removed']}</li>"
            if parsed_toml["points_removed"] > 0
            else ""
        )
        optional_compstars = (
            f'<a href="{images_prefix}{starui.filename_no_suff_no_ext}_compstarsA.png" '
            f'title="Plot of all comparison stars used to measure this star">C</a>, '
            f'<a href="{images_prefix}{starui.filename_no_suff_no_ext}_compstarsB.png" '
            f'title="Plot of all comparison stars used to measure this star + the star itself">C+V</a>, '
            if "compA" in star.result
            else ""
        )
        optional_stats = (
            f'<li>stats: <a href="{images_prefix}{starui.filename_no_suff_no_ext}_merr_vs_jd.png" '
            f'alt="Plot of magnitude error vs JD">merr_vs_jd</a></li>'
            if "merr_vs_jd" in star.result
            else ""
        )
        optional_explore = (
            f'<div class="fl w-70 pa2 ba">'
            f' <img class="special-img-class" src="{second_url}" alt="{second_url}"/>'
            f"</div>"
            if explore
            else ""
        )
        # show an extra phase link if the main image is not a phase diagram and the period is not -1
        optional_phase = (
            f'<li><a href="{phase_url}" alt="Phase diagram">Phase diagram</a></li>'
            if utils.is_check(var_type)
            else ""
        )
        optional_comments = (
            f'<li>Comments: {parsed_toml["comments"]}'
            if "comments" in parsed_toml and explore
            else ""
        )
        result = f"""<div class="bb-l b--black-10 w-100">
    <div class="fl w-70 pa2 ba">
        <img class="special-img-class" src="{main_url}" alt="{main_url}"/>
    </div>{optional_explore}
    <div class="fl w-30 pa2 ba">
        <ul>
            <li>{ucac4_name} (mag:{ucac4_mag})</li>
            <li>name: {name}</li>{ucac4_coords}
            <li>coords: {utils.get_hms_dms_sober(star.coords)} (ours)</li>{separation}{points_removed}
            <li>period (d): {display_period}</li>{minmax}
            <li>mag. range: {mag_range}</li>
            <li>{ucac4_colors}</li>{optional_comments}
            <li><a target="_blank" rel="noopener noreferrer"
                href="https://www.aavso.org/vsx/index.php?view=about.vartypessort">type</a>:
                {var_type_link}{vsx_var_flag}</li>
            {vsx_link}<li>epoch: {epoch}</li>
            <li><a href="{images_prefix}vsx_and_star_{starui.filename_no_suff_no_ext}.png">finder chart</a></li>
            <li><a href="{images_prefix}{starui.filename_no_suff_no_ext}_ext.txt">observations</a></li>
            <li>light curve: <a title="Standard plot of julian date on X and magnitude on Y"
                href="{images_prefix}{starui.filename_no_suff_no_ext}_light.png">Normal</a>,
                <a title="Lightcurve with empty spaces cut out, taking period into account. If period is 1 day, and we have a gap of 1 day and 5 minutes, we cut out the 1 day and leave the 5 minutes so that the shape of the curve is preserved"
                href="{images_prefix}{starui.filename_no_suff_no_ext}_lightpa.png">PA</a>,
                <a title="All observations are plotted sequentially, without taking into account day/time"
                href="{images_prefix}{starui.filename_no_suff_no_ext}_lightcont.png">Continuous</a></li>{optional_phase}
            <li>comparison stars: {optional_compstars}<a title="text list of comparison stars"
                href="{images_prefix}{starui.filename_no_suff_no_ext}_comps.txt">list</a></li>{optional_stats}
        </ul>
    </div>
</div>
"""
        return result
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        import traceback

        traceback.print_exc()
        logging.error(message)
        logging.error("File not found error in store and curve for star " + star.path)
        return f'<div class="fl w-100 pa2 ba">Could not load {txt_path}</div>'

def read_vast_lightcurves(
    star: StarDescription,
    compstarproxy,
    star_result_dict,
    do_light,
    do_light_raw,
    do_phase,
    do_aavso,
    aavso_limit,
    basedir: str,
    chartsdir: Path,
    phasedir: Path,
    aavsodir: Path,
    jdfilter: List[float] = None,
    jd_excl_stop: float = None,
):
    start = timer()
    if star.local_id not in star_result_dict:
        star_result_dict[star.local_id] = {}
    temp_dict = star_result_dict[star.local_id]
    if star.path == "":
        logging.debug(f"Path for {star.local_id} is empty")
        return
    if not do_light and not do_phase:
        logging.debug("Nothing to do, no charts or phase needed")
    logging.debug(
        f"Reading lightcurves for star {star.local_id} at path {star.path} for {star}..."
    )
    # comp_mags = [x.vmag for x in comparison_stars]
    try:
        df = reading.read_lightcurve_vast(star.path)
        df["floatJD"] = df["JD"].astype(float)  # np.float is removed in recent numpy versions
        df = utils.jd_filter_df(df, jdfilter)
        if df is None or len(df) == 0:
            logging.info(f"No lightcurve found for {star.path}")
            return
        comp_stars = compstarproxy.value
        filtered_compstars, check_star = do_compstars.filter_comparison_stars(
            star, comp_stars
        )
        comp_stars = None
        df["realV"], df["realErr"] = do_compstars.calculate_ensemble_photometry(
            df, filtered_compstars, do_compstars.weighted_value_ensemble_method
        )
        # drop rows where the ensemble photometry failed
        df = df.dropna(subset=["realV", "realErr"])
        do_calibration.add_catalog_data_to_sd(
            star,
            df["realV"].mean(),
            df["realErr"].mean(),
            None,
            "PHOTOMETRY",
            star.coords,
        )
        starui: utils.StarUI = utils.get_star_or_catalog_name(star, suffix="")
        period, epoch = determine_period_and_epoch(df, star)
        # override the calculated epoch with a user-supplied epoch
        if star.has_metadata("SITE") and star.get_metadata("SITE").epoch is not None:
            epoch = star.get_metadata("SITE").epoch
        # outlier filtering depending on the phase diagram
        df, points_removed = phase_dependent_outlier_removal(df, period)
        temp_dict["compstars"] = write_compstars(
            star, starui.filename_no_ext, phasedir, filtered_compstars, check_star
        )
        ymin, ymax, epoch_min, epoch_max, t_start, t_end = calculate_min_max_epochs(
            df["floatJD"], df["realV"]
        )
        logging.debug(
            f"Debug for the min/max percentiles: {ymin}, {ymax}, {epoch_min}, {epoch_max}, "
            f"{t_start}, {t_end}"
        )
        write_toml(
            starui.filename_no_ext,
            phasedir,
            period,
            epoch,
            star,
            points_removed,
            ymin,
            ymax,
            t_start,
            t_end,
        )
        if do_phase and "phase" not in star.result:
            temp_dict["phase"] = plot_phase_diagram(
                star, df.copy(), phasedir, period=period, epoch=epoch, suffix=""
            )
            # anova_period, epoch = determine_period_and_epoch(df, star, method=anova_period_calculate)
            # logging.info(f"anova period is {anova_period}")
            # temp_dict['phase'] = plot_phase_diagram(star, df.copy(), phasedir, period=anova_period,
            #                                         epoch=epoch, suffix="b")
        if do_light and "light" not in star.result:
            temp_dict["lightpa"] = plot_lightcurve_pa(star, df.copy(), chartsdir, period)
            temp_dict["lightcont"] = plot_lightcurve_continuous(star, df.copy(), chartsdir)
            temp_dict["light"] = plot_lightcurve(star, df.copy(), chartsdir)
        if do_aavso and "aavso" not in star.result:
            settings = toml.load("settings.txt")
            temp_dict["aavso"] = do_aavso_report.report(
                star,
                df.copy(),
                filtered_compstars,
                check_star,
                target_dir=aavsodir,
                sitelat=settings["sitelat"],
                sitelong=settings["sitelong"],
                sitealt=settings["sitealt"],
                camera_filter="V",
                observer=settings["observer"],
                chunk_size=aavso_limit,
            )
        filtered_compstars = None
        # logging.error(f"contents of temp_dict is: {temp_dict}")
        star_result_dict[star.local_id] = temp_dict
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        logging.error(message)
        import traceback

        traceback.print_exc()
        logging.error(
            f"Exception during read_lightcurve for {star.path}, size JD: {len(df['floatJD'])}, "
            f"size V: {len(df['realV'])}"
        )
    end = timer()
    logging.debug(f"Full lightcurve/phase: {end - start}")

def metadata_filter(star: StarDescription, catalog_name, exclude=None):
    # avoid a mutable default argument; None means "exclude nothing"
    exclude = [] if exclude is None else exclude
    catalogs = star.get_metadata_list()
    return catalog_name in catalogs and len([x for x in exclude if x in catalogs]) == 0

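# Usage sketch with metadata keys that appear elsewhere in this code base (VSX, SITE);
# star_descriptions is assumed to be a list of StarDescription objects:
#
#     vsx_without_site = [
#         sd for sd in star_descriptions if metadata_filter(sd, "VSX", exclude=["SITE"])
#     ]
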
def update_img(
    star: StarDescription,
    record: ImageRecord,
    neighbours: List[StarDescription],
    resultdir: str,
    platesolved_file: str,
):
    resultlines = []
    fig = plt.figure(figsize=(36, 32), dpi=dpi, facecolor="w", edgecolor="k")
    wcs = do_calibration.get_wcs(platesolved_file)
    data, shapex, shapey = reading.get_fits_data(platesolved_file)
    backgr = data.mean()
    data = data.reshape(shapex, shapey)
    data = np.pad(
        data, (padding, padding), "constant", constant_values=(backgr, backgr)
    )
    starxy = SkyCoord.to_pixel(star.coords, wcs=wcs, origin=0)
    # add main target
    add_circle(record.x, record.y, 3, "b")
    startoml = load_toml(star, resultdir)
    star.vmag = startoml["vmag"]
    resultlines.append(log_star(star, -1))
    random_offset = False
    offset1 = 70
    offset2 = 10
    # add neighbours
    for idx, nstar in enumerate(neighbours):
        add_pixels(nstar, wcs, 0)
        add_circle(nstar.xpos, nstar.ypos, 4, "g")
        if random_offset:
            xrandoffset = random.randint(3, 4)
            yrandoffset = random.randint(2, 3)
            xsignrand = random.choice([-1.0, 1.0])
            ysignrand = random.choice([-1.0, 1.0])
            offset1 = xsignrand * xrandoffset
            offset2 = ysignrand * yrandoffset
        # https://matplotlib.org/3.1.0/api/_as_gen/matplotlib.pyplot.arrow.html
        plt.annotate(
            f"{idx}",
            xy=(round(nstar.xpos), round(nstar.ypos)),
            xycoords="data",
            xytext=(offset1, offset2),
            textcoords="offset points",
            size=NEIGHBOUR_TEXT_SIZE,
            color="red",
            arrowprops=dict(arrowstyle="-", color="grey", alpha=0.2),
        )
        resultlines.append(log_star(nstar, idx))
    # loading and painting ucac stars
    radius = 0.08
    ucac_stars: List[MinimalStarTuple] = ucac4.get_region_minimal_star_tuples(
        star.coords.ra.deg, star.coords.dec.deg, radius
    )
    logging.info(f"Looping on {len(ucac_stars)} UCAC4 stars")
    for ucac_star in ucac_stars:
        coord = SkyCoord(ucac_star.ra, ucac_star.dec, unit="deg")
        if star.coords.separation(coord).degree > radius:
            continue
        xy = SkyCoord.to_pixel(coord, wcs=wcs, origin=0)
        x, y = round(xy[0].item(0)), round(xy[1].item(0))
        add_circle(x, y, 2, "c")
        plt.annotate(
            f"{ucac_star.id[-3:]}",
            xy=(x, y),
            xycoords="data",
            xytext=(2, 2),
            textcoords="offset points",
            size=UCAC4_TEXT_SIZE,
            arrowprops=dict(arrowstyle="-", color="grey", alpha=0.2),
        )
    median = np.median(data)
    # data = ndimage.interpolation.rotate(data, record.rotation)
    plt.imshow(data, cmap="gray_r", origin="lower", vmin=0, vmax=min(median * 5, 65536))
    starui = utils.get_star_or_catalog_name(star)
    save_inspect_image = Path(
        Path(resultdir) / "inspect",
        f"{starui.filename_no_ext}_{Path(resultdir).name}_inspect.png",
    )
    save_inspect_txt = Path(
        Path(resultdir) / "inspect",
        f"{starui.filename_no_ext}_{Path(resultdir).name}_inspect.txt",
    )
    fig.savefig(save_inspect_image)
    logging.info(f"Saved file as {save_inspect_image}.")
    write_file(star, save_inspect_txt, resultlines)
    plt.close(fig)
    plt.clf()

def stardesc(self, id, ra, dec):
    return StarDescription(local_id=id, coords=SkyCoord(ra, dec, unit="deg"))

def get_ucac4_of_sd(star: StarDescription) -> CatalogData:
    catdata: CatalogData = star.get_metadata("UCAC4")
    return catdata

def catalog_filter(star: StarDescription, catalog_name):
    return star.has_metadata(catalog_name)

def get_apass_row_to_star_descriptions(row):
    return StarDescription(
        coords=SkyCoord(row["RAJ2000"], row["DEJ2000"], unit="deg"),
        vmag=row["Vmag"],
        e_vmag=row["e_Vmag"],
    )

def report(
    star: StarDescription,
    df_curve: DataFrame,
    comp_stars: ComparisonStars,
    check_star: ComparisonStars,
    target_dir: Path,
    sitelat,
    sitelong,
    sitealt,
    camera_filter=None,
    observer="RMH",
    chunk_size=None,
):
    df = df_curve.sort_values("JD")
    star_match_ucac4 = (
        star.get_metadata("UCAC4").name if star.has_metadata("UCAC4") else None
    )
    star_match_vsx = (
        star.get_metadata("VSX").name if star.has_metadata("VSX") else None
    )
    var_display_name = star_match_ucac4 if star_match_vsx is None else star_match_vsx
    var_display_name = (
        var_display_name if var_display_name is not None else f"Star_{star.local_id}"
    )
    # utils.replace_spaces(f"{star.local_id:05}" if star.aavso_id is None else star.aavso_id)
    starui = utils.get_star_or_catalog_name(star)
    earth_location = EarthLocation(lat=sitelat, lon=sitelong, height=sitealt * u.m)
    logging.debug(f"Starting aavso report with star:{star}")
    if chunk_size is None:
        chunk_size = df.shape[0]
    star_chunks = [df[i:i + chunk_size] for i in range(0, df.shape[0], chunk_size)]
    chunk_counters = 0
    kname = check_star.star_descriptions[0].get_metadata("UCAC4").catalog_id
    notes = f"Standard mag: K = {check_star.comp_catalogmags[0]:.3f}"
    filterdict = None
    # Setting up the filter value
    if camera_filter is None:
        filterdict = read_camera_filters.read_filters()
        # print(filterdict)
        filterlambda = lambda x: filterdict[x]
    else:
        filterlambda = lambda x: camera_filter
    for chunk in star_chunks:
        chunk_counters += 1
        suffix = f"_{chunk_counters}.txt" if len(star_chunks) != 1 else ".txt"
        filename = Path(target_dir, f"{starui.filename_no_ext}_ext{suffix}")
        with open(filename, "w") as fp:
            writer = aavso.ExtendedFormatWriter(
                fp,
                observer,
                location=(sitelat, sitelong, sitealt),
                software="https://github.com/mrosseel/vast-automation",
                type="EXTENDED",
                obstype="CCD",
            )
            for _, row in chunk.iterrows():
                # logging.info(row, type(row))
                jd = row["JD"]
                if jd in check_star.observations[0]:
                    # adding an offset of 30 to get instrumental mags to be positive (recommended by aavso)
                    check_mag = f"{check_star.observations[0][jd][0] + 30:.3f}"
                else:
                    check_mag = "na"
                writer.addrow(
                    {
                        "name": var_display_name,
                        "date": jd,
                        "magnitude": row["realV"],
                        "magnitude_error": row["realErr"],
                        "filter": filterlambda(row["JD"]),
                        "transformed": "NO",
                        "magnitude_type": "STD",
                        "comparison_name": "ENSEMBLE",
                        "comparison_magnitude": "na",
                        "check_name": kname,
                        "check_magnitude": check_mag,
                        "airmass": calculate_airmass(
                            star.coords, earth_location, row["floatJD"]
                        ).value,
                        "group": "na",
                        "chart": "na",
                        "notes": notes,
                    }
                )
            writer.flush()
    return Path(target_dir, f"{starui.filename_no_ext}_ext.txt")
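
# Minimal pandas-only sketch of the chunking above, with a made-up 5-row frame and
# chunk_size=2: the lightcurve splits into chunks of 2, 2 and 1 rows, which are then
# written as *_ext_1.txt, *_ext_2.txt and *_ext_3.txt.
#
#     >>> import pandas as pd
#     >>> demo = pd.DataFrame({"JD": [1.0, 2.0, 3.0, 4.0, 5.0]})
#     >>> [len(c) for c in [demo[i:i + 2] for i in range(0, demo.shape[0], 2)]]
#     [2, 2, 1]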