def add_oh_downtime(session, tree):
    """
    add OHIGGINS downtime necessary for satellite observations with higher priority

    :param session: dictionary with session specific fields
    :param tree: xml parameter tree
    :return: None
    """
    settings = configparser.ConfigParser()
    settings.read("settings.ini")
    # fix: only access the "general" section when it exists -- the original
    # read settings["general"] unconditionally for "Oh_down" and raised a
    # KeyError whenever the section was missing
    if settings.has_section("general"):
        pad = settings["general"].getint("Oh_down_extra_min", 5)
        path = settings["general"].get("Oh_down")
    else:
        pad = 5
        path = None
    if not path or not os.path.exists(path):
        Message.addMessage("WARNING: OH down time file \"{}\" not found!".format(path))
        return

    s_start = session["date"]
    s_end = session["date"] + datetime.timedelta(hours=session["duration"])
    # match e.g.: "APPROVED ... 2020-01-31T10:00.2020-01-31T12:00"
    pattern = re.compile(
        r'(APPROVED|SCHEDULED|PLANNED).*(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}).(\d{4}-\d{2}-\d{2}T\d{2}:\d{2})')

    with open(path) as f:
        for l in f:
            g = pattern.search(l)
            if g is not None:
                # pad the downtime window by "pad" minutes on both sides
                start = datetime.datetime.strptime(g.group(2), "%Y-%m-%dT%H:%M") - datetime.timedelta(minutes=pad)
                end = datetime.datetime.strptime(g.group(3), "%Y-%m-%dT%H:%M") + datetime.timedelta(minutes=pad)

                insert_station_setup_with_time(start, end, s_start, s_end, session, tree, "OHIGGINS", "down",
                                               "satellite observation")
def adjust_template(output_path, session, templates, pre_scheduling_functions):
    """
    adjusts the template XML file with session specific fields

    :param output_path: prefix for the output folder
    :param session: dictionary with session specific fields
    :param templates: list of templates for this session type
    :param pre_scheduling_functions: list of functions executed prior to scheduling
    :return: list of all generated XML files
    """
    folder = os.path.join(output_path,
                          os.path.basename(os.path.dirname(templates[0])))
    if not os.path.exists(folder):
        os.makedirs(folder)
        # fix: the file must be named ".gitignore" -- the original wrote
        # ".gitignore." (trailing dot), which git never reads, defeating the
        # "!summary.txt"/"!.gitignore" whitelist written below
        with open(os.path.join(folder, ".gitignore"), "w") as f:
            f.write("*\n!summary.txt\n!.gitignore")
        with open(os.path.join(folder, "summary.txt"), "w") as f:
            f.write("")

    out = []
    for template in templates:
        # apply session fields and pre-scheduling hooks to the template XML
        tree = adjust_xml(template, session, pre_scheduling_functions)
        Message.log(False)

        output_dir = os.path.join(folder, session["code"])
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        newFile = os.path.join(output_dir, os.path.basename(template))
        out.append(newFile)
        tree.write(newFile, pretty_print=True)
    Message.log(True)

    return out
def start_uploading(settings):
    """
    start uploading process based on "upload_scheduler.txt"

    processes every entry of "upload_scheduler.txt" that is flagged "pending"
    and scheduled for today, then rewrites the file with updated statuses;
    entries older than one year are dropped during the rewrite

    :param settings: content of settings.ini file (configparser object)
    :return: None
    """
    # NOTE(review): relies on a module-level "args" object
    # (args.observing_programs, args.fallback_email) -- presumably parsed
    # command-line arguments; confirm where this module defines it
    today = datetime.date.today()
    with open("upload_scheduler.txt", "r") as f:
        # each non-blank line is expected to hold: <path> <date> <status>
        lines = [l for l in f if l.strip()]

        for i, lin in enumerate(lines):
            path, time, status = lin.split()
            # program name is the parent directory name of the session path
            program = os.path.basename(os.path.dirname(path))
            target_date = datetime.datetime.strptime(time, "%Y-%m-%d").date()

            if status == "pending" and target_date == today:
                if program not in args.observing_programs:
                    Message.addMessage(
                        "skipping uploading program: {} ({})".format(
                            program, os.path.basename(path)),
                        dump="header")
                    continue

                # reset per-entry message buffers before processing
                Message.clearMessage("program")
                Message.clearMessage("session")
                Message.clearMessage("download")
                Message.clearMessage("log")

                # upload destination per program: "ivs", "gow", "no", or a
                # comma-separated list of email addresses
                upload = settings[program].get("upload", "no").lower()
                if upload == "ivs":
                    code = os.path.basename(os.path.dirname(path))
                    Transfer.upload(path)
                    emails = Helper.read_emails(settings[program],
                                                args.fallback_email)
                    SendMail.writeMail_upload(code, emails)
                elif upload == "no":
                    pass
                elif upload == "gow":
                    code = os.path.basename(os.path.dirname(path))
                    Transfer.upload_GOW_ftp(path)
                    emails = Helper.read_emails(settings[program],
                                                args.fallback_email)
                    SendMail.writeMail_upload(code, emails)
                else:
                    # anything else is treated as a list of mail recipients
                    emails = upload.split(",")
                    # NOTE(review): this inner "f" shadows the outer file handle
                    with open(os.path.join(path, "selected", "email.txt"),
                              "r") as f:
                        body = f.read()
                    SendMail.writeMail(os.path.join(path, "selected"), emails,
                                       body)

                # mark entry as done; NOTE(review): str.replace would also hit
                # the word "pending" anywhere else in the line (e.g. the path)
                lines[i] = lin.replace("pending", "uploaded")

    with open("upload_scheduler.txt", "w") as f:
        for lout in lines:
            path, time, status = lout.split()
            target_date = datetime.datetime.strptime(time, "%Y-%m-%d").date()
            # do not list sessions older than 1 year in upload_scheduler.txt
            if target_date + datetime.timedelta(days=365) > today:
                f.write(lout)
# --- Example 4 ("Beispiel" = German for "example"; pasted-listing separator) ---
def VGOS_procs_block(**kwargs):
    """
    append the station procedures ($PROCS block) to the session .skd file

    copies the COMMON block and the blocks of all participating stations from
    "Templates/<program_code>/procs.cat" into the schedule file

    :param kwargs: mandatory keyword-arguments: "path", "session", "program_code"
    :return: None
    """
    path = kwargs["path"]
    session = kwargs["session"]
    stations = session["stations"]
    program_code = kwargs["program_code"]

    procs_cat = Path("Templates") / program_code / "procs.cat"
    if not procs_cat.exists():
        Message.addMessage("[WARNING] procs.cat file not found!",
                           dump="session")
        return

    # first .skd file inside the session folder
    skd_file = next(Path(path).glob("*.skd"))

    re_begin = re.compile(r"BEGIN\s+(\w+)")
    re_end = re.compile(r"END\s+(\w+)")
    with open(skd_file, 'a') as f_skd:
        f_skd.write("$PROCS\n")

        with open(procs_cat) as f_procs:
            flag_write = False
            for l in f_procs:
                re_begin_search = re_begin.search(l)
                if re_begin_search:
                    station = re_begin_search.group(1)
                    # copy the COMMON block and blocks of scheduled stations
                    if station == "COMMON" or station in stations:
                        flag_write = True

                if re_end.search(l):
                    # fix: only copy the END line of a block that is actually
                    # being written -- the original emitted a stray "END xx"
                    # line for every skipped station block as well
                    if flag_write:
                        f_skd.write(l)
                    flag_write = False
                    continue

                if flag_write:
                    f_skd.write(l)
# --- Example 5 (pasted-listing separator) ---
def _vex_in_sked_format(**kwargs):
    """
    re-create the session .vex file in "sked" format

    copies the .skd file into the sked program folder, drives an interactive
    "sked" session via pexpect to write a new .vex file and copies the result
    back over the original .vex file; a backup of the original .vex is kept

    NOTE(review): a second definition of "_vex_in_sked_format" appears later
    in this module and shadows this one, so this version is effectively dead code

    :param kwargs: mandatory keyword-arguments: "path", "session"
    :return: None
    """
    path_selected = kwargs["path"]
    code = kwargs["session"]["code"].lower()
    name_skd = (code + ".skd")
    name_vex = (code + ".vex")
    path_to_skd = Path(path_selected) / name_skd

    # create backup of original .vex file
    path_to_vex = Path(path_selected) / name_vex
    backup_vex = Path(path_selected) / (code + ".vex.orig.VieSchedpp")
    shutil.copy(str(path_to_vex), str(backup_vex))

    settings = configparser.ConfigParser()
    settings.read("settings.ini")

    path_sked = settings["general"].get("path_sked")

    if path_sked is None:
        Message.addMessage(
            "[WARNING] failed to generate .vex file in \"sked\" format! Undefined path to sked folder",
            dump="session")
        return

    # sked has to run inside its own folder, so copy the .skd file there
    shutil.copy(str(path_to_skd), str(Path(path_sked) / name_skd))
    cwd = Path.cwd()
    try:
        os.chdir(path_sked)
        # drive the interactive sked prompt: load .skd, write .vex ("vwc"), quit
        child = pexpect.spawn("sked " + name_skd)
        child.expect(r'\?')
        child.sendline("vwc " + name_vex)
        child.expect(r'\?')
        child.sendline("q")
        child.close()

        newVex = Path(path_sked) / name_vex
        shutil.copy(str(newVex), str(path_to_vex))

    except:  # NOTE(review): bare except silently swallows all error details
        os.chdir(cwd)
        Message.addMessage(
            "[WARNING] failed to generate .vex file in \"sked\" format",
            dump="session")

    # restores cwd on the success path (redundant after the except branch,
    # which already changed back; no finally is used here)
    os.chdir(cwd)

    with open(path_to_vex) as f:
        # NOTE(review): "all" shadows the builtin; line 2 of the .vex file is
        # overwritten (typo "gernerated" is part of the emitted text)
        all = f.readlines()
        all[1] = "*  schedule gernerated by VieSched++, converted with sked\n"

    with open(path_to_vex, 'w') as f:
        f.writelines(all)

    pass
# --- Example 6 (pasted-listing separator) ---
def delegate_send(slot):
    """
    change delegate send() function to different function (e.g.: send via gmail-server or bkg-server)

    :param slot: name ("Gmail", "BKG")
    :return: None
    """
    # dispatch table: slot keyword -> (send function, confirmation message)
    dispatch = {
        "gmail": (send_gmail, "Send mails via [GMAIL]"),
        "bkg": (send_bkg, "Send mails via [BKG]"),
    }
    entry = dispatch.get(slot.lower())
    if entry is None:
        Message.addMessage("ERROR: SMTP server slot \"{:}\" not found".format(slot))
        return
    sender, note = entry
    SendMail.send = sender
    print(note)
# --- Example 7 (pasted-listing separator) ---
def download_ftp():
    """
    download session master files

    downloads the current year's master, intensive master and media master
    files from the IVS BKG FTP server into the "MASTER" directory (plus the
    next year's files if run in December)

    :return: None
    """

    # master files are stored in "MASTER" directory
    path = "MASTER"
    if not os.path.exists(path):
        os.makedirs(path)

    # define files to download
    now = datetime.datetime.now()
    year = now.year % 100
    names = ["master{:02d}.txt".format(year),
             "master{:02d}-int.txt".format(year),
             "mediamaster{:02d}.txt".format(year)]

    # also download master file for next year in case today is December
    if now.month == 12:
        names.append("master{:02d}.txt".format(year + 1))
        names.append("master{:02d}-int.txt".format(year + 1))
        names.append("mediamaster{:02d}.txt".format(year + 1))

    try:
        # connect to FTP server; the context manager closes the connection
        # (fix: the original never closed it)
        with FTP("ivs.bkg.bund.de") as ftp:
            ftp.login()
            ftp.cwd("pub/vlbi/ivscontrol")

            # get a list of all files at FTP server
            ftp_files = ftp.nlst()

            # download all files from FTP server
            for name in names:
                Message.addMessage("FTP download: {}... ".format(name), dump="download", endLine=False)
                # skip files which are not present
                if name not in ftp_files:
                    Message.addMessage("file not found", dump="download")
                    continue
                out = os.path.join(path, name)
                # fix: close the local file deterministically -- the original
                # used open(out, 'wb').write and leaked the handle
                with open(out, 'wb') as f_out:
                    msg = ftp.retrbinary("RETR " + name, f_out.write)
                Message.addMessage("msg: {}".format(msg), dump="download")

    except ftp_errors as err:
        Message.addMessage("#### ERROR {} ####".format(err), dump="download")
        Message.addMessage(traceback.format_exc(), dump="download")
def sefd_based_snr(**kwargs):
    """
    change target SNR based on baseline sensitivity

    :param kwargs: mandatory keyword-arguments: "tree", "session"
    :return: None
    """
    tree = kwargs["tree"]
    session = kwargs["session"]

    # split baselines into three sensitivity classes
    high_high, high_low, low_low = get_baseline_sensitivity_groups(
        session["stations"])
    groups = (("high_high", high_high), ("high_low", high_low),
              ("low_low", low_low))

    baseline_node = tree.find("./baseline")
    for group_name, members in groups:
        add_group(baseline_node, group_name, members)

    Message.addMessage("add baseline SEFD based SNR targets")
    for group_name, members in groups:
        Message.addMessage(
            "    new baseline group \"{:2}\" with {:d} members".format(
                group_name, len(members)))

    # define X/S-band minimum SNR parameters for each class
    parameters_node = tree.find("./baseline/parameters")
    snr_table = (("low_snr", "18", "13"), ("mid_snr", "20", "15"),
                 ("high_snr", "22", "17"))
    for parameter_name, snr_x, snr_s in snr_table:
        add_parameter(parameters_node, parameter_name, ["minSNR", "minSNR"],
                      [snr_x, snr_s], [("band", "X"), ("band", "S")])

    # sensitive baselines get the low SNR targets and vice versa
    for group_name, parameter_name in (("high_high", "low_snr"),
                                       ("high_low", "mid_snr"),
                                       ("low_low", "high_snr")):
        insert_setup_node(session,
                          group_name,
                          tree.find("./baseline/setup"),
                          parameter_name,
                          tag="group")
def adjust_INT_observing_mode_VLBA_256_8_RDV(**kwargs):
    """
    switch to the 256-8(RDV) observing mode when a VLBA station participates

    :param kwargs: mandatory keyword-arguments: "tree", "session", "folder"
    :return: None
    """
    tree = kwargs["tree"]
    session = kwargs["session"]
    folder = kwargs["folder"]

    has_vlba = any("VLBA" in name or "PIETOWN" in name
                   for name in session["stations"])
    if not has_vlba:
        return

    mode = "256-8(RDV)"
    tree.find("./mode/skdMode").text = mode
    # catalogs to absolute path (resolved relative to the template folder)
    previous_dir = os.getcwd()
    os.chdir(folder)
    tree.find("./catalogs/freq").text = os.path.abspath("./freq.cat")
    tree.find("./catalogs/rx").text = os.path.abspath("./rx.cat")
    tree.find("./catalogs/tracks").text = os.path.abspath("./tracks.cat")
    os.chdir(previous_dir)

    Message.addMessage("Changing observing mode to \"{}\"".format(mode))
    Message.addMessage("Changing freq, tracks and rx catalogs")
def adjust_R1_observing_mode(**kwargs):
    """
    change target SNR based on baseline sensitivity

    :param kwargs: mandatory keyword-arguments: "tree", "session"
    :return: None
    """
    tree = kwargs["tree"]
    session = kwargs["session"]
    yy = session["date"].year % 100
    mediamaster = os.path.join("MASTER", "mediamaster{:02d}.txt".format(yy))

    use_512 = False
    with open(mediamaster) as f:
        for raw in f:
            entry = raw.strip()
            # table rows of the media master are delimited with "|"
            if not entry.startswith("|"):
                continue
            entry = entry.strip("|")
            if not entry.startswith(session["name"]):
                continue
            # 7th column holds the station/module string, 4 chars per station
            station_field = entry.split("|")[6].split()[0]
            codes = [
                station_field[i:i + 4]
                for i in range(0, len(station_field), 4)
            ]
            # 4th char "G" marks a 512 Mbps module
            n_g = sum(c[3] == "G" for c in codes)
            if n_g == len(codes):
                use_512 = True
            elif n_g > 0:
                Message.addMessage(
                    "WARNING: undefined observing mode! {:d} stations with 512 Mbps, "
                    "{:d} stations with 256 Mbps".format(
                        n_g,
                        len(codes) - n_g),
                    dump="header")
            break

    if use_512:
        mode = "512-16(CONT11)"
        tree.find("./mode/skdMode").text = mode
        Message.addMessage("Changing observing mode to \"{}\"".format(mode))
def add_comment(station, p_start, p_end, parameter_name, comment=""):
    """
    add comment to email in case of parameter change

    :param station: station name
    :param p_start: start time of parameter change
    :param p_end: end time of parameter change
    :param parameter_name: name of the parameter in xml file
    :param comment: optional comment
    :return: None
    """
    time_fmt = "{:%Y.%m.%d %H:%M:%S}"
    duration_minutes = (p_end - p_start).total_seconds() / 60

    # prefix a dash only when an actual comment was supplied
    if comment:
        comment = "- " + comment
    # report the "down" parameter under its human readable name
    if parameter_name == "down":
        parameter_name = "downtime"

    Message.addMessage("   add {}: {:8s} {:s} {:s} ({:.0f} minutes) {:s}".format(
        parameter_name, station, time_fmt.format(p_start),
        time_fmt.format(p_end), duration_minutes, comment))
# --- Example 12 (pasted-listing separator) ---
def is_valid_root_setup(station, p_start, p_end, session_start, session_end,
                        tree_node):
    """
    check if this tree node is a valid root entry point for a new setup node

    :param station: station name
    :param p_start: start time of parameter change
    :param p_end: end time of parameter change
    :param session_start: session start time
    :param session_end: session end time
    :param tree_node: xml parameter tree node
    :return: True if it is a valid setup start point, otherwise False
    """
    # defaults when the node carries no explicit member/start/end children
    member = "__all__"
    setup_start = session_start
    setup_end = session_end

    for s in tree_node:
        if s.tag == "member":
            member = s.text
        elif s.tag == "start":
            setup_start = datetime.datetime.strptime(s.text,
                                                     "%Y.%m.%d %H:%M:%S")
        elif s.tag == "end":
            setup_end = datetime.datetime.strptime(s.text, "%Y.%m.%d %H:%M:%S")

    flag = True
    # fix: the node applies if it targets all stations ("__all__") or this
    # specific one -- the original compared against "__end__", which can never
    # match the "__all__" default set above
    if member not in ("__all__", station):
        flag = False
    # the new interval must lie completely inside the node's interval
    if not (p_start >= setup_start and p_end <= setup_end):
        flag = False

    # warn about partially overlapping setup intervals (either direction)
    if setup_start < p_start < setup_end < p_end:
        Message.addMessage("   ERROR: overlapping parameter setups!")
    if p_start < setup_start < p_end < setup_end:
        Message.addMessage("   ERROR: overlapping parameter setups!")

    return flag
def add_downtime_intensives(**kwargs):
    """
    add down time based on IVS intensive schedule master

    it will extend the downtime based on entries in settings.ini file

    :param kwargs: mandatory keyword-arguments: "tree", "session"
    :return: None
    """

    Message.addMessage("Look for Intensive downtime")
    settings = configparser.ConfigParser()
    settings.read("settings.ini")

    tree = kwargs["tree"]
    session = kwargs["session"]

    # extra minutes of downtime added before and after each intensive
    if settings.has_section("general"):
        pad = settings["general"].getint("ivs_int_downtime_extra_min", 10)
    else:
        pad = 10

    year = session["date"].year % 100
    master_ivs = os.path.join("MASTER", "master{:02d}-int.txt".format(year))
    intensives = read_master([master_ivs])
    s_start = session["date"]
    s_end = session["date"] + datetime.timedelta(hours=session["duration"])
    # fix: loop variable renamed from "int" -- the original shadowed the builtin
    for intensive in intensives:
        int_start = intensive["date"] - datetime.timedelta(minutes=pad)
        int_end = intensive["date"] + datetime.timedelta(
            hours=intensive["duration"]) + datetime.timedelta(minutes=pad)

        # flag every station that observes both this session and the intensive
        for sta in intensive["stations"]:
            if sta in session["stations"]:
                insert_station_setup_with_time(int_start, int_end, s_start,
                                               s_end, session, tree, sta,
                                               "down", intensive["name"])
# --- Example 14 (pasted-listing separator) ---
def _vlba_vex_adjustments(**kwargs):
    """
    run the external VLBA .vex correction script on the session .vex file

    NOTE(review): a second definition of "_vlba_vex_adjustments" appears later
    in this module and shadows this one, so this version is effectively dead code

    :param kwargs: mandatory keyword-arguments: "path", "session"
    :return: None
    """
    path_selected = kwargs["path"]
    code = kwargs["session"]["code"].lower()
    name_vex = (code + ".vex")

    # create backup of original .vex file
    # NOTE(review): despite this comment, no backup copy is made in this version
    path_to_vex = Path(path_selected) / name_vex

    settings = configparser.ConfigParser()
    settings.read("settings.ini")

    path_script = settings["general"].get("path_vex_correction_script")

    if path_script is None:
        Message.addMessage(
            "[WARNING] failed to execute \"vlba_vex_correct\" script",
            dump="session")
        return

    cwd = Path.cwd()

    try:
        # the correction script is executed from within its own directory
        os.chdir(Path(path_script).parent)

        p = subprocess.run([path_script, path_to_vex],
                           capture_output=True,
                           text=True)
        # forward the script's stdout/stderr to the log
        log = p.stdout
        if log:
            Message.addMessage(log, dump="log")
        errlog = p.stderr
        if errlog:
            Message.addMessage(errlog, dump="log")
        # NOTE(review): raises CalledProcessError on non-zero exit; there is no
        # except clause here, so the error propagates to the caller
        p.check_returncode()
    finally:
        os.chdir(cwd)

    # NOTE(review): redundant -- the finally block above already restored cwd
    os.chdir(cwd)
# --- Example 15 (pasted-listing separator) ---
def _vex_in_sked_format(**kwargs):
    """
    re-create the session .vex file in "sked" format

    copies the .skd file into the sked program folder, drives an interactive
    "sked" session via pexpect to re-write the .vex file and copies the result
    back over the original .vex file; a backup of the original .vex is kept

    :param kwargs: mandatory keyword-arguments: "path", "session"
    :return: None
    """
    Message.addMessage(
        "\nconvert .vex file to \"sked\" format for external parsers",
        dump="session")
    path_selected = kwargs["path"]
    code = kwargs["session"]["code"].lower()
    name_skd = (code + ".skd")
    name_vex = (code + ".vex")
    path_to_skd = Path(path_selected) / name_skd

    # create backup of original .vex file
    path_to_vex = (Path(path_selected) / name_vex).absolute()
    backup_vex = Path(path_selected) / (code + ".vex.orig.VieSchedpp")
    Message.addMessage("    - generate backup of {} to {}".format(
        path_to_vex, backup_vex),
                       dump="session")
    shutil.copy(str(path_to_vex), str(backup_vex))

    settings = configparser.ConfigParser()
    settings.read("settings.ini")

    path_sked = settings["general"].get("path_sked")
    sked_executable = settings["general"].get("sked_executable")
    if sked_executable is None:
        # fall back to "sked" on the PATH when no explicit executable is set
        Message.addMessage(
            "no path to sked executable define - defaulting to 'sked'",
            dump="session")
        sked_executable = "sked"

    if path_sked is None:
        Message.addMessage(
            "[WARNING] failed to generate .vex file in \"sked\" format! Undefined path to sked folder",
            dump="session")
        return

    # sked has to run inside its own folder, so copy the .skd file there first
    Message.addMessage("    - copy {} to {}".format(path_to_skd,
                                                    Path(path_sked) /
                                                    name_skd),
                       dump="session")
    shutil.copy(str(path_to_skd), str(Path(path_sked) / name_skd))

    cwd = Path.cwd()
    try:
        Message.addMessage("    - change dir to {}".format(path_sked),
                           dump="session")
        os.chdir(path_sked)
        # remove a stale .vex file so the new one is written from scratch
        if Path(name_vex).is_file():
            Message.addMessage(
                "    - delete existing .vex file {}".format(name_vex),
                dump="session")
            Path(name_vex).unlink()
        # NOTE(review): the .format(path_sked) argument is unused -- the
        # message string contains no placeholder
        Message.addMessage(
            "    - execute sked to parse .vex file".format(path_sked),
            dump="session")
        # drive the interactive sked prompt: load .skd, write .vex ("vwc"), quit
        child = pexpect.spawn(sked_executable + " " + name_skd)
        child.expect(r'\?')
        child.sendline("vwc " + name_vex)
        child.expect(r'\?')
        child.sendline("q")
        child.close()

        newVex = Path(path_sked) / name_vex
        Message.addMessage("    - copy new .vex file from {} to {}".format(
            newVex, path_to_vex),
                           dump="session")
        shutil.copy(str(newVex), str(path_to_vex))
    except:  # NOTE(review): bare except -- reports the failure and continues
        Message.addMessage(
            "[ERROR] failed to generate .vex file in \"sked\" format",
            dump="session")
        Message.addMessage(traceback.format_exc(), dump="session")

    finally:
        # always restore the original working directory
        Message.addMessage("    - change dir to {}".format(cwd),
                           dump="session")
        os.chdir(str(cwd))

    # replace line 2 of the .vex file with a VieSched++ note
    with open(path_to_vex) as f:
        all = f.readlines()  # NOTE(review): "all" shadows the builtin
        all[1] = "*  schedule generated by VieSched++, converted with sked\n"

    with open(path_to_vex, 'w') as f:
        f.writelines(all)

    pass
# --- Example 16 (pasted-listing separator) ---
def _vlba_vex_adjustments(**kwargs):
    """
    run the external VLBA .vex correction script on the session .vex file

    :param kwargs: mandatory keyword-arguments: "path", "session"
    :return: None
    """
    Message.addMessage("adjust .vex file for VLBA needs", dump="session")
    path_selected = kwargs["path"]
    code = kwargs["session"]["code"].lower()
    name_vex = (code + ".vex")

    path_to_vex = (Path(path_selected) / name_vex).absolute()

    settings = configparser.ConfigParser()
    settings.read("settings.ini")

    path_script = settings["general"].get("path_vex_correction_script")

    if path_script is None:
        Message.addMessage(
            "[ERROR] failed to execute \"vlba_vex_correct\" script - script not found",
            dump="session")
        return

    cwd = Path.cwd()

    try:
        # the correction script is executed from within its own directory
        Message.addMessage("    - change dir to {}".format(
            Path(path_script).parent),
                           dump="session")
        os.chdir(Path(path_script).parent)

        Message.addMessage("    - execute {} {}".format(
            path_script, path_to_vex),
                           dump="session")
        p = subprocess.run([path_script, path_to_vex],
                           capture_output=True,
                           text=True)
        # forward the script's stdout/stderr to the log
        log = p.stdout
        if log:
            Message.addMessage(log, dump="log")
        errlog = p.stderr
        if errlog:
            Message.addMessage(errlog, dump="log")
        # raises CalledProcessError on non-zero exit -> handled below
        p.check_returncode()
    except:  # NOTE(review): bare except -- reports the failure and continues
        Message.addMessage(
            "[ERROR] failed to execute \"vlba_vex_correct\" script - returns error",
            dump="session")
        Message.addMessage(traceback.format_exc(), dump="session")
    finally:
        # always restore the original working directory
        Message.addMessage("    - change dir to {}".format(cwd),
                           dump="session")
        os.chdir(str(cwd))
def start_scheduling(settings):
    """
    start VieSched++ AUTO processing

    reads the general settings, optionally downloads the required master
    files and triggers scheduling for every observing program selected in
    settings.ini

    :param settings: content of settings.ini file (configparser object)
    :return: None
    """
    # output folder prefix; normalize path separators on Windows
    prefix = settings["general"].get("prefix_output_folder", "Schedules")
    if os.sep == "\\":
        prefix = prefix.replace("/", "\\")

    path_scheduler = settings["general"].get("path_to_scheduler")

    # make sure the upload bookkeeping file exists
    if not os.path.exists("upload_scheduler.txt"):
        with open("upload_scheduler.txt", "w"):
            pass

    # write the report header
    Message.addMessage("VieSched++ AUTO report", dump="header")
    today = datetime.date.today()
    Message.addMessage("date: {:%B %d, %Y}".format(today), dump="header")
    Message.addMessage("computer: {}, Python {}".format(
        platform.node(), platform.python_version()),
                       dump="header")
    if settings["general"].get("institute") is not None:
        Message.addMessage("institution: {}".format(
            settings["general"].get("institute")),
                           dump="header")
    Message.addMessage(
        "This is an automatically generated message. Do NOT reply to this email directly!",
        dump="header")
    # download files
    # NOTE(review): relies on a module-level "args" object (no_download,
    # observing_programs, date, fallback_email) -- presumably parsed
    # command-line arguments; confirm where this module defines it
    if not args.no_download:
        Transfer.download_ftp()
        Transfer.download_http()
    else:
        Message.addMessage("no downloads", dump="header")

    # start processing all programs
    for program in settings.sections():
        if program == "general":
            continue
        if program not in args.observing_programs:
            print("skipping scheduling observing program: {}".format(program))
            continue

        s_program = settings[program]
        emails = Helper.read_emails(s_program, args.fallback_email)
        # "schedule_date" may be a date string ("yyyy-mm-dd"), a number of
        # days, or a keyword (e.g. "next"); the -d option overrides it
        delta_days = s_program.get("schedule_date", "10")
        if args.date is not None:
            try:
                target_day = datetime.datetime.strptime(args.date, '%Y-%m-%d')
                delta_days = (target_day.date() - today).days
                year = target_day.year % 100
            except ValueError:
                print("ERROR while interpreting target date (-d option): {}".
                      format(args.date))
                print(
                    "    must be in format \"yyyy-mm-dd\" (e.g.: 2020-01-31)")
                return
        else:
            try:
                # first try "schedule_date" as an explicit date
                target_day = datetime.datetime.strptime(delta_days, '%Y-%m-%d')
                delta_days = (target_day.date() - today).days
                year = target_day.year % 100
            except ValueError:
                if delta_days.isnumeric():
                    # plain number = offset in days from today
                    delta_days = int(delta_days)
                    target_day = today + datetime.timedelta(days=delta_days)
                    year = target_day.year % 100
                else:
                    # keyword such as "next" -- kept as a lowercase string
                    delta_days = delta_days.lower()
                    year = today.year % 100

        delta_days_upload = s_program.getint("upload_date", 7)
        statistic_field = s_program.get("statistics").split(",")
        upload = True
        if s_program.get("upload", "no").lower() == "no":
            upload = False

        # read master files (file name template may contain a $YY placeholder)
        template_master = Template(s_program.get("master", "master$YY.txt"))
        master = template_master.substitute(YY=str(year))

        master = os.path.join("MASTER", master)
        sessions = Helper.read_master(master)

        try:
            pattern = s_program["pattern"]
            # resolve the hook functions referenced by name in settings.ini
            f = Helper.find_function(select_best_functions,
                                     s_program["function"])[0]
            f_pre = Helper.find_function(
                pre_scheduling_functions,
                s_program.get("pre_scheduling_functions", ""))
            f_post = Helper.find_function(
                post_scheduling_functions,
                s_program.get("post_scheduling_functions", ""))

            start(sessions, path_scheduler, program, pattern, f, emails,
                  delta_days, delta_days_upload, statistic_field, prefix,
                  upload, f_pre, f_post)

        except:
            # report the failure for this program and continue with the next
            Message.addMessage("#### ERROR ####")
            Message.addMessage(traceback.format_exc())
            SendMail.writeErrorMail(emails)
def start(master,
          path_scheduler,
          code,
          code_regex,
          select_best,
          emails,
          delta_days,
          delta_days_upload,
          statistic_field,
          output_path="./Schedules/",
          upload=False,
          pre_fun=None,
          post_fun=None):
    """
    start auto processing for one observing program

    :param master: list of dictionaries with session specific fields read from session master
    :param path_scheduler: path to VieSched++ executable
    :param code: observing program code
    :param code_regex: regular expression to match session name
    :param select_best: function to select best schedule from statistics dataframe
    :param emails: list of email addresses
    :param delta_days: time offset in days from where schedule should be generated,
        or the string "next" to process only the next upcoming matching session
    :param delta_days_upload: time offset in days when schedule should be updated
    :param statistic_field: fields to be stored in statistics file
    :param output_path: prefix for output path
    :param upload: flag if session needs to be uploaded
    :param pre_fun: list of functions executed prior to scheduling
    :param post_fun: list of functions executed after scheduling
    :return: None
    """

    Message.clearMessage("program")
    pattern = re.compile(code_regex)

    # Work on a copy of post_fun: the previous implementation appended
    # station-dependent functions directly to the caller's list inside the
    # session loop, which (a) mutated the caller's list, (b) accumulated
    # duplicate entries across sessions/programs and (c) crashed when the
    # default post_fun=None was used.
    base_post_fun = list(post_fun) if post_fun is not None else []

    Message.addMessage("=== {} observing program ===".format(code),
                       dump="program")
    Message.addMessage("contact:", dump="program")
    for email in emails:
        Message.addMessage("    {}".format(email), dump="program")

    Message.addMessage("schedule master contained {} sessions".format(
        len(master)),
                       dump="program")
    today = datetime.date.today()
    sessions = []
    if delta_days == "next":
        # pick only the next upcoming session matching the pattern;
        # "next" mode never uploads
        for s in master:
            if s["date"].date() < today:
                continue
            if pattern.match(s["name"]):
                sessions.append(s)
                break
        upload = False
    else:
        target_day = today + datetime.timedelta(days=delta_days)
        Message.addMessage("date offset: {} days".format(delta_days),
                           dump="program")
        Message.addMessage(
            "target start time: {:%B %d, %Y}".format(target_day),
            dump="program")
        sessions = [
            s for s in master if pattern.match(s["name"])
            if s["date"].date() == target_day
        ]
    Message.addMessage("{} session(s) will be processed".format(len(sessions)),
                       dump="program")

    # get list of templates
    templates = []
    template_path = os.path.join("Templates", code)
    for file in os.listdir(template_path):
        if file.endswith(".xml"):
            templates.append(os.path.join(template_path, file))

    # loop over all sessions
    for session in sessions:
        Message.clearMessage("session")
        Message.clearMessage("log")
        Message.addMessage("##### {} #####".format(session["code"].upper()))
        Message.addMessage(
            "{name} ({code}) start {date} duration {duration}h stations {stations}"
            .format(**session))
        xmls = adjust_template(output_path, session, templates, pre_fun)
        xml_dir = os.path.dirname(xmls[0])
        df_list = []

        # per-session post-processing chain (station-dependent additions must
        # not leak into the next session)
        session_post_fun = list(base_post_fun)
        flag_VLBA = any("VLBA" in sta or "PIETOWN" in sta
                        for sta in session["stations"])
        flag_DSS = any(sta.startswith("DSS") for sta in session["stations"])
        if flag_VLBA or flag_DSS:
            session_post_fun.append(
                post_scheduling_functions._vex_in_sked_format)
        if flag_VLBA:
            session_post_fun.append(
                post_scheduling_functions._vlba_vex_adjustments)

        # loop over all templates
        for xml in xmls:
            Message.addMessage("   processing file: {}".format(xml))
            xml = os.path.abspath(xml)
            p = subprocess.run([path_scheduler, xml],
                               cwd=xml_dir,
                               capture_output=True,
                               text=True)
            log = p.stdout
            if log:
                Message.addMessage(log, dump="log")
            errlog = p.stderr
            if errlog:
                Message.addMessage(errlog, dump="log")
            p.check_returncode()

            # rename statistics.csv and simulation_summary file to avoid name clashes
            statistic_in = os.path.join(xml_dir, "statistics.csv")
            statistic_out = "statistics_{}.csv".format(
                os.path.basename(os.path.splitext(xml)[0]))
            statistic_out = os.path.join(xml_dir, statistic_out)
            if os.path.exists(statistic_out):
                os.remove(statistic_out)
            os.rename(statistic_in, statistic_out)

            simulation_summary_in = os.path.join(xml_dir,
                                                 "simulation_summary.txt")
            simulation_summary_out = "simulation_summary_{}.txt".format(
                os.path.basename(os.path.splitext(xml)[0]))
            simulation_summary_out = os.path.join(xml_dir,
                                                  simulation_summary_out)
            if os.path.exists(simulation_summary_out):
                os.remove(simulation_summary_out)
            os.rename(simulation_summary_in, simulation_summary_out)

            # read statistics.csv file
            df = pd.read_csv(statistic_out, index_col=0)
            df_list.append(df)

        # concatenate all statistics.csv files
        stats = pd.concat(df_list)
        stats = stats.drop_duplicates()
        stats.sort_index(inplace=True)

        # find best schedule based on statistics
        best_idx = select_best(stats, template_path=template_path)
        Message.addMessage("best version: v{:03d}".format(best_idx))
        if upload:
            Message.addMessage(
                "this session will be uploaded on: {:%B %d, %Y}".format(
                    today +
                    datetime.timedelta(days=delta_days - delta_days_upload)))
            if delta_days - delta_days_upload < 1:
                Message.addMessage("[WARNING]: upload date already passed!")
        else:
            Message.addMessage("this session will NOT be uploaded!")

        summary_file = os.path.join(os.path.dirname(xml_dir), "summary.txt")
        summary_df = Helper.addStatistics(stats, best_idx, statistic_field,
                                          session["code"].upper(),
                                          summary_file)

        # copy best schedule to selected folder
        version_pattern = "_v{:03d}".format(best_idx)
        bestFiles = glob.glob(
            os.path.join(xml_dir, "*{}*".format(version_pattern)))
        xml_dir_selected = os.path.join(xml_dir, "selected")

        if os.path.exists(xml_dir_selected):
            shutil.rmtree(xml_dir_selected)

        os.makedirs(xml_dir_selected)
        for f in bestFiles:
            fname = os.path.basename(f).replace(version_pattern, "")
            destination = os.path.join(xml_dir_selected, fname)
            shutil.copy(f, destination)
        stats.to_csv(os.path.join(xml_dir_selected, "merged_statistics.csv"))

        if upload:
            Helper.update_uploadScheduler(xml_dir_selected,
                                          delta_days - delta_days_upload,
                                          upload)

        # plotting is best-effort: report failures but keep processing
        try:
            skdFile = os.path.join(xml_dir_selected,
                                   "{}.skd".format(session["code"].lower()))
            skd = skd_parser.skdParser(skdFile)
            skd.parse()
            Plotting.summary(summary_df, xml_dir_selected)
            Plotting.polar_plots(skd, xml_dir_selected, "duration")
            Plotting.polar_plots(skd, xml_dir_selected, "start_time")
            Plotting.close_all()
        except Exception:  # narrowed from bare except so Ctrl-C still works
            Message.addMessage("#### ERROR ####")
            Message.addMessage(traceback.format_exc())

        for post_f in session_post_fun:
            post_f(path=xml_dir_selected,
                   ds=stats.loc[best_idx, :],
                   session=session,
                   program_code=code)

        SendMail.writeMail(xml_dir_selected, emails)
    if (args.fallback_email.count("@") == 1):
        args.fallback_email = [args.fallback_email]

    settings = setup()

    if args.observing_programs is None:
        print("No observing programs selected!")
        print(
            "Pass observing program name as written in settings.ini file using the '-p' flag"
        )
        print("e.g.: python VieSchedpp_AUTO.py -p INT1 INT2 INT3")
        sys.exit(0)

    if args.no_email:
        SendMail.changeSendMailsFlag(False)

    try:
        if not args.no_scheduling:
            print("===== START SCHEDULING =====")
            start_scheduling(settings)
        if not args.no_upload:
            print("===== START UPLOADING =====")
            start_uploading(settings)
        print("VieSched++ AUTO finished")

    except BaseException as err:
        Message.addMessage("#### ERROR ####")
        Message.addMessage(traceback.format_exc())
        SendMail.writeErrorMail(args.fallback_email)
def url_response(cat):
    """
    download a single file from https and store

    primarily used to download the CATALOG files

    :param cat: (output_path, download_url)
    :return: None
    """
    path, url = cat

    # only download file if current file was last modified longer than 23 hours ago
    Message.addMessage("HTTPS download: {}... ".format(os.path.basename(path)),
                       dump="download",
                       endLine=False)
    if os.path.exists(path):
        last_update = os.path.getmtime(path)
        now = datetime.datetime.now()
        new_update = time.mktime(now.timetuple())
        diff = new_update - last_update
        if diff < 23 * 3600:
            Message.addMessage(
                "up to date (last modified {:.2f} hours ago) -> no download".
                format(diff / 3600.0),
                dump="download")
            return

    try:
        # download new file; "with" guarantees the connection is released
        # (the previous version leaked the response object) and
        # iter_content avoids the slow 128-byte default chunking of
        # iterating the response directly
        with requests.get(url, stream=True) as r:
            if r.ok:
                with open(path, 'wb') as f:
                    for ch in r.iter_content(chunk_size=8192):
                        f.write(ch)
                    Message.addMessage("successful", dump="download")
            else:
                Message.addMessage("ERROR", dump="download")

    except requests.exceptions.RequestException as err:
        Message.addMessage("#### ERROR {} ####".format(err), dump="download")
        Message.addMessage(traceback.format_exc(), dump="download")
def upload_GOW_ftp(path):
    """
    upload to GOW server using ftp

    :param path: path to session
    :return: None
    """
    path = os.path.join(path, "selected")
    code = os.path.basename(os.path.dirname(path))

    # raises IndexError if no .skd file was generated for this session
    skdFile = glob.glob(os.path.join(path, "*.skd"))[0]
    txtFile = os.path.splitext(skdFile)[0] + ".txt"
    vexFile = os.path.splitext(skdFile)[0] + ".vex"

    # NOTE(review): the old log line claimed 141.74.2.12 while the code
    # connected to 141.74.1.12; the message now reports the host actually
    # used -- confirm which address is the intended GOW server
    host = "141.74.1.12"
    Message.addMessage("##### {} #####\n".format(code), dump="download")
    Message.addMessage("connecting to: {}\n".format(host), dump="download")

    pw = read_pw_from_file("GOW_ftp_pw.txt")
    if pw is not None:
        # context manager closes the control connection even on error
        # (the previous version never called quit()/close())
        with FTP(host) as ftp:
            ftp.login("vlbi", pw)  # *** INSERT PASSWORD HERE (replace pw) ***
            ftp.set_pasv(True)

            Message.addMessage("uploading files to GOW ftp server",
                               dump="download")

            Message.addMessage("\nserver content before upload:", dump="log")
            # get a list of all files at FTP server
            content = []
            ftp.retrlines('LIST', content.append)
            for l1 in content:
                Message.addMessage(l1, dump="log")

            Message.addMessage("\nuploading:", dump="download")
            ftp.mkd(code)
            ftp.cwd(code)
            for file in [skdFile, txtFile, vexFile]:
                Message.addMessage("    {}... ".format(file),
                                   endLine=False,
                                   dump="download")
                with open(file, 'rb') as f:
                    msg = ftp.storbinary(
                        'STOR {}'.format(os.path.basename(file)), f)
                Message.addMessage(msg, dump="download")
            ftp.cwd("..")

            # get a list of all files at FTP server
            Message.addMessage("\nserver content after upload:", dump="log")
            content = []
            ftp.retrlines('LIST', content.append)
            for l2 in content:
                Message.addMessage(l2, dump="log")
    else:
        Message.addMessage(
            "No password for GOW FTP server was provided. Please store password in a \"GOW_ftp_pw.txt\" "
            "file or insert password in source code (See file \"Transfer.py\" line with comment "
            "\"*** INSERT PASSWORD HERE (replace pw) ***\"",
            dump="log")