Code example #1
File: schedule.py Project: alexmaul/pytroll-schedule
from datetime import timedelta

from trollsched.graph import Graph  # provides add_edge, vertices and bron_kerbosch


def get_non_conflicting_groups(passes, delay=timedelta(seconds=0)):
    """Get the different non-conflicting solutions in a group of conflicting
    passes.
    """
    # Uses graphs and maximal clique finding with the Bron-Kerbosch algorithm.

    order = len(passes)

    if order == 1:
        return [passes]

    graph = Graph(order)

    # Sort once so that the vertex indices and the list indexed below agree.
    passes = sorted(passes, key=lambda x: x.risetime)
    for i, overpass in enumerate(passes):
        for j in range(i + 1, order):
            if not overpass.overlaps(passes[j], delay):
                graph.add_edge(i, j)

    groups = []
    for res in graph.bron_kerbosch(set(), set(graph.vertices), set()):
        grp = []
        for vertex in res:
            grp.append(passes[vertex])
        groups.append(sorted(grp))

    return groups
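
The comment above names the Bron-Kerbosch algorithm for enumerating maximal cliques: every maximal clique of the "does not overlap" graph is one non-conflicting selection of passes. For reference, here is a minimal self-contained sketch of the classic recursion, independent of the project's Graph class (adjacency as a plain dict of neighbour sets is an assumption for illustration):

def bron_kerbosch(r, p, x, adj):
    """Yield every maximal clique of an undirected graph.

    r: vertices of the growing clique, p: candidates, x: already explored.
    adj maps each vertex to the set of its neighbours.
    """
    if not p and not x:
        yield r
        return
    for v in list(p):
        yield from bron_kerbosch(r | {v}, p & adj[v], x & adj[v], adj)
        p.remove(v)
        x.add(v)

# A triangle 0-1-2 plus the extra edge 2-3 yields two maximal cliques:
adj = {0: {1, 2}, 1: {0, 2}, 2: {0, 1, 3}, 3: {2}}
print(list(bron_kerbosch(set(), set(adj), set(), adj)))  # [{0, 1, 2}, {2, 3}]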
Code example #2
def get_best_sched(overpasses, area_of_interest, delay, avoid_list=None):
    """Get the best schedule based on *area_of_interest*.
    """
    avoid_list = avoid_list or []
    passes = sorted(overpasses, key=lambda x: x.risetime)
    grs = conflicting_passes(passes, delay)
    logger.debug("conflicting %s", str(grs))
    ncgrs = [get_non_conflicting_groups(gr, delay) for gr in grs]
    logger.debug("non conflicting %s", str(ncgrs))
    n_vertices = len(passes)

    graph = Graph(n_vertices=n_vertices + 2)

    def add_arc(graph, p1, p2, hook=None):
        logger.debug("Adding arc between " + str(p1) +
                     " and " + str(p2) + "...")
        if p1 in avoid_list or p2 in avoid_list:
            w = 0
            logger.debug("...0 because in the avoid_list!")
        else:
            w = combine(p1, p2, area_of_interest)
        logger.debug("...with weight " + str(w))

#         with open("/tmp/schedule.gv", "a") as fp_:
#             fp_.write('        "' + str(p1) + '" -> "' + str(p2) +
#                       '" [ label = "' + str(w) + '" ];\n')

        graph.add_arc(passes.index(p1) + 1,
                      passes.index(p2) + 1, w)
        if hook is not None:
            hook()

    prev = set()
    for ncgr in ncgrs:
        for pr in prev:
            foll = set(gr[0] for gr in ncgr)
            for f in foll:
                add_arc(graph, pr, f)

        prev = set(sorted(gr, key=lambda x: x.falltime)[-1] for gr in ncgr)
        for gr in ncgr:
            if len(gr) > 1:
                for p1, p2 in zip(gr[:-1], gr[1:]):
                    add_arc(graph, p1, p2)

    for pr in prev:
        graph.add_arc(passes.index(pr) + 1, n_vertices + 1)
    for first in ncgrs[0][0]:
        graph.add_arc(0, passes.index(first) + 1)

    dist, path = graph.dag_longest_path(0, n_vertices + 1)

    del dist
    return [passes[idx - 1] for idx in path[1:-1]], (graph, passes)
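
get_best_sched reduces schedule optimisation to a heaviest-path query on a directed acyclic graph: each pass is a vertex, arc weights come from combine(), and two artificial vertices (0 and n_vertices + 1) bracket the schedule. The project's Graph.dag_longest_path is not reproduced in this listing; the following is a minimal sketch of the standard technique, relaxing arcs in topological order (the {u: [(v, weight), ...]} edge format is an assumption for illustration):

from collections import defaultdict

def dag_longest_path(edges, start, end):
    """Heaviest start->end path in a DAG given as {u: [(v, weight), ...]}."""
    order, seen = [], set()

    def visit(u):  # post-order DFS yields a reverse topological order
        seen.add(u)
        for v, _ in edges.get(u, ()):
            if v not in seen:
                visit(v)
        order.append(u)

    visit(start)
    dist = defaultdict(lambda: float("-inf"), {start: 0.0})
    pred = {}
    for u in reversed(order):  # relax every arc in topological order
        for v, w in edges.get(u, ()):
            if dist[u] + w > dist[v]:
                dist[v] = dist[u] + w
                pred[v] = u
    path = [end]  # walk predecessor links back from the end vertex
    while path[-1] != start:
        path.append(pred[path[-1]])
    return dist[end], path[::-1]

edges = {0: [(1, 2.0), (2, 1.0)], 1: [(3, 1.5)], 2: [(3, 4.0)], 3: []}
print(dag_longest_path(edges, 0, 3))  # (5.0, [0, 2, 3])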
Code example #3
File: schedule.py Project: alexmaul/pytroll-schedule
def get_best_sched(overpasses, area_of_interest, scores, delay, avoid_list=None):
    """Get the best schedule based on *area_of_interest*.
    """
    avoid_list = avoid_list or []
    print(avoid_list)  # debug output: show the passes to be avoided
    input()            # debug pause: wait for Enter before continuing
    passes = sorted(overpasses, key=lambda x: x.risetime)
    grs = conflicting_passes(passes, delay)
    logger.debug("conflicting %s", str(grs))
    ncgrs = [get_non_conflicting_groups(gr, delay) for gr in grs]
    logger.debug("non conflicting %s", str(ncgrs))
    n_vertices = len(passes)

    graph = Graph(n_vertices=n_vertices + 2)

    def add_arc(graph, p1, p2, hook=None):
        logger.debug("Adding arc between " + str(p1) +
                     " and " + str(p2) + "...")
        if p1 in avoid_list or p2 in avoid_list:
            w = 0
            logger.debug("...0 because in the avoid_list!")
        else:
            w = combine(p1, p2, area_of_interest, scores)
        logger.debug("...with weight " + str(w))

        with open("/tmp/schedule.gv", "a") as fp_:
            fp_.write('        "' + str(p1) + '" -> "' + str(p2) +
                      '" [ label = "' + str(w) + '" ];\n')

        graph.add_arc(passes.index(p1) + 1,
                      passes.index(p2) + 1, w)
        if hook is not None:
            hook()

    prev = set()
    for ncgr in ncgrs:
        for pr in prev:
            foll = set(gr[0] for gr in ncgr)
            for f in foll:
                add_arc(graph, pr, f)

        prev = set(sorted(gr, key=lambda x: x.falltime)[-1] for gr in ncgr)
        for gr in ncgr:
            if len(gr) > 1:
                for p1, p2 in zip(gr[:-1], gr[1:]):
                    add_arc(graph, p1, p2)

    for pr in prev:
        graph.add_arc(passes.index(pr) + 1, n_vertices + 1)
    for first in ncgrs[0][0]:
        graph.add_arc(0, passes.index(first) + 1)

    dist, path = graph.dag_longest_path(0, n_vertices + 1)

    del dist
    return [passes[idx - 1] for idx in path[1:-1]], (graph, passes)
Code example #4
def run():
    """The schedule command."""
    import argparse
    global logger

    parser = argparse.ArgumentParser()
    # general arguments
    parser.add_argument("-c",
                        "--config",
                        default=None,
                        help="configuration file to use")
    parser.add_argument("-t", "--tle", default=None, help="tle file to use")
    parser.add_argument("-l",
                        "--log",
                        default=None,
                        help="File to log to (defaults to stdout)")
    parser.add_argument("-m",
                        "--mail",
                        nargs="*",
                        default=None,
                        help="mail address(es) to send error messages to.")
    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        help="print debug messages too")
    # argument group: coordinates and times
    group_postim = parser.add_argument_group(
        title="start-parameter",
        description="(or set values in the configuration file)")
    group_postim.add_argument("--lat",
                              type=float,
                              help="Latitude, degrees north")
    group_postim.add_argument("--lon",
                              type=float,
                              help="Longitude, degrees east")
    group_postim.add_argument("--alt", type=float, help="Altitude, km")
    group_postim.add_argument("-f",
                              "--forward",
                              type=float,
                              help="time ahead to compute the schedule")
    group_postim.add_argument("-s",
                              "--start-time",
                              type=parse_datetime,
                              help="start time of the schedule to compute")
    group_postim.add_argument("-d",
                              "--delay",
                              default=60,
                              type=float,
                              help="delay (in seconds) needed between two " +
                              "consecutive passes (60 seconds by default)")
    # argument group: special behaviour
    group_spec = parser.add_argument_group(
        title="special",
        description="(additional parameter changing behaviour)")
    group_spec.add_argument("-a",
                            "--avoid",
                            help="xml request file with passes to avoid")
    group_spec.add_argument("--no-aqua-terra-dump",
                            action="store_false",
                            help="do not consider Aqua/Terra-dumps")
    group_spec.add_argument("--multiproc",
                            action="store_true",
                            help="use multiple parallel processes")
    # argument group: output-related
    group_outp = parser.add_argument_group(
        title="output",
        description="(file pattern are taken from configuration file)")
    group_outp.add_argument("-o",
                            "--output-dir",
                            default=None,
                            help="where to put generated files")
    group_outp.add_argument(
        "-u",
        "--output-url",
        default=None,
        help="URL where to put generated schedule file(s)" +
        ", otherwise use output-dir")
    group_outp.add_argument("-x",
                            "--xml",
                            action="store_true",
                            help="generate an xml request file (schedule)")
    group_outp.add_argument("-r",
                            "--report",
                            action="store_true",
                            help="generate an xml report file (schedule)")
    group_outp.add_argument("--scisys",
                            action="store_true",
                            help="generate a SCISYS schedule file")
    group_outp.add_argument("-p",
                            "--plot",
                            action="store_true",
                            help="generate plot images")
    group_outp.add_argument("-g",
                            "--graph",
                            action="store_true",
                            help="save graph info")
    group_outp.add_argument("--meos",
                            action="store_true",
                            help="generate a MEOS schedule file")
    group_outp.add_argument("--metno-xml",
                            action="store_true",
                            help="generate a METNO xml pass data file")
    opts = parser.parse_args()

    if opts.config:
        # read_config() returns:
        #     [(coords, station, area, scores)], forward, start, {pattern}
        # station_list, forward, start, pattern = utils.read_config(opts.config)
        scheduler = utils.read_config(opts.config)

    # TODO make config file compulsory

    if (not opts.config) and (not (opts.lon or opts.lat or opts.alt)):
        parser.error("Coordinates must be provided in the absence of "
                     "configuration file.")

    if not (opts.xml or opts.scisys or opts.report or opts.metno_xml):
        parser.error("No output specified, use one of '-x/--xml', "
                     "'-r/--report', '--scisys' or '--metno-xml'")

    if opts.output_dir is None:
        opts.output_dir = os.path.curdir
    if "dir_output" not in scheduler.patterns:
        pattern["dir_output"] = opts.output_dir

    if opts.log:
        previous = os.path.exists(opts.log)
        handler = logging.handlers.RotatingFileHandler(opts.log, backupCount=7)
        if previous:
            handler.doRollover()
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter(
            "[%(levelname)s: %(asctime)s :"
            " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S'))
    if opts.verbose:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    handler.setLevel(loglevel)
    logging.getLogger('').setLevel(loglevel)
    logging.getLogger('').addHandler(handler)

    if opts.mail:
        mhandler = logging.handlers.SMTPHandler(
            "localhost", "*****@*****.**", opts.mail,
            "Scheduler")
        mhandler.setLevel(logging.WARNING)
        logging.getLogger('').addHandler(mhandler)

    logger = logging.getLogger("trollsched")

    tle_file = opts.tle
    if opts.start_time:
        start_time = opts.start_time
    else:
        start_time = datetime.utcnow()

    allpasses = {}
    graph = {}

    logger.debug("start: %s forward: %s" %
                 (scheduler.start, scheduler.forward))

    pattern_args = {
        "output_dir": opts.output_dir,
        "date": start_time.strftime("%Y%m%d"),
        "time": start_time.strftime("%H%M%S")
    }
    dir_output = build_filename("dir_output", scheduler.patterns, pattern_args)
    if not os.path.exists(dir_output):
        logger.debug("Create output dir " + dir_output)
        os.makedirs(dir_output)

    if len(scheduler.stations) > 1:
        opts.comb = True
        import pickle
        with open(os.path.join(dir_output, "opts.pkl"), "wb") as ph:
            pickle.dump(opts, ph)
    else:
        opts.comb = False

    scheduler.opts = opts

    # single- or multi-processing?
    if not opts.multiproc or len(scheduler.stations) == 1:
        # sequentially process each station's single schedule.
        for station in scheduler.stations:
            graph[station.id], allpasses[station.id] = station.single_station(
                scheduler, start_time, tle_file)
    else:
        # processing the stations' single schedules with multiprocessing.
        process_single = {}
        statlst_ordered = []
        # first round through the stations, forking sub-processes to do the
        # "single station calculations" in parallel.
        # the pickling of passes and graphs is done inside single_station().
        from multiprocessing import Process
        for station in scheduler.stations:
            statlst_ordered.append(station.id)
            process_single[station.id] = Process(target=station.single_station,
                                                 args=(scheduler, start_time,
                                                       tle_file))
            process_single[station.id].start()
        # second round through the stations, collecting the sub-processes and
        # their results.
        for station_id in statlst_ordered:
            process_single[station_id].join()
            pattern_args["station"] = station_id
            # load graph for station
            graph[station_id] = Graph()
            graph[station_id].load(
                build_filename("file_graph", scheduler.patterns, pattern_args)
                + ".npz")
            # load pickled passes for station
            with open(
                    os.path.join(dir_output, "allpasses.%s.pkl" % station_id),
                    "rb") as ph:
                allpasses[station_id] = pickle.load(ph)

    if opts.comb:
        combined_stations(scheduler, start_time, graph, allpasses)
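
parse_datetime, imported from trollsched.schedule in the next example, is used above as an argparse type= converter so that --start-time arrives as a datetime object. A minimal sketch of such a converter follows; the timestamp notation is an assumption for illustration, not the project's documented format:

from datetime import datetime

def parse_datetime(value):
    """argparse type= converter: turn 'YYYYMMDDHHMMSS' into a datetime.

    The format string is assumed for this sketch; the real trollsched
    parser may accept a different notation.
    """
    return datetime.strptime(value, "%Y%m%d%H%M%S")

# e.g. --start-time 20240101120000 -> datetime(2024, 1, 1, 12, 0)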
Code example #5
File: combine.py Project: adybbroe/pytroll-schedule
def main():
    import logging
    import logging.handlers
    import os
    import pickle
    import sys
    from datetime import datetime, timedelta
    from trollsched.graph import Graph
    from trollsched.schedule import parse_datetime
    from trollsched.schedule import combined_stations, build_filename

    try:
        from trollsched.schedule import read_config
        import argparse
        logger = logging.getLogger("trollsched")
        parser = argparse.ArgumentParser()
        parser.add_argument("-c",
                            "--config",
                            default=None,
                            help="configuration file to use")
        parser.add_argument("-s",
                            "--start-time",
                            type=parse_datetime,
                            help="start time of the schedule to compute")
        parser.add_argument("-o",
                            "--output-dir",
                            default=None,
                            help="where to put generated files")
        parser.add_argument("-x",
                            "--xml",
                            action="store_true",
                            help="generate an xml request file (schedule)")
        parser.add_argument("-r",
                            "--report",
                            action="store_true",
                            help="generate an xml report file (schedule)")
        parser.add_argument("--scisys",
                            action="store_true",
                            help="generate a SCISYS schedule file")
        parser.add_argument("-p",
                            "--plot",
                            action="store_true",
                            help="generate plot images")
        parser.add_argument("-g",
                            "--graph",
                            action="store_true",
                            help="save graph info")
        opts = parser.parse_args()

        if opts.config is None:
            parser.error("Configuration file required.")
        if opts.start_time:
            start_time = opts.start_time
        else:
            start_time = datetime.utcnow()

        # [coords, station, area, scores], forward, start, pattern
        station_list, forward, start, pattern = read_config(opts.config)

        pattern_args = {
            "output_dir": opts.output_dir,
            "date": start_time.strftime("%Y%m%d"),
            "time": start_time.strftime("%H%M%S")
        }
        dir_output = build_filename("dir_output", pattern, pattern_args)
        if not os.path.exists(dir_output):
            print(dir_output, "does not exist!")
            sys.exit(1)
        with open(os.path.join(dir_output, "opts.pkl"), "rb") as ph:
            opts = pickle.load(ph)

        graph = {}
        allpasses = {}
        for coords, station, area, scores in station_list:
            pattern_args["station"] = station
            graph[station] = Graph()
            graph[station].load(
                build_filename("file_graph", pattern, pattern_args) + ".npz")

            #             print "---",station,"---"
            #             print_matrix(graph[station].adj_matrix, ly=5)
            #             print_matrix(graph[station].weight_matrix, ly=5, lx=-1)

            #             allpasses[station] = get_passes_from_xml_file(os.path.join(opts.report, "acquisition-schedule-report." + station + ".xml"))
            #             print len(allpasses[station]),allpasses[station]

            #             for v in graph[station].neighbours(1):
            #                 print v, " : ", allpasses[station][v].risetime, "->", graph[station].weight(1, v)

            with open(
                    os.path.join(
                        build_filename("dir_output", pattern, pattern_args),
                        "allpasses.%s.pkl" % station), "rb") as ph:
                allpasses[station] = pickle.load(ph)

        from trollsched.schedule import conflicting_passes
        totpas = []
        for s, sp in allpasses.items():
            print("len(sp)", s, len(sp))
            totpas.extend(list(sp))
        passes = sorted(totpas, key=lambda x: x.risetime)
        cpg = conflicting_passes(passes, timedelta(seconds=600))
        print("ALLPASSES", len(allpasses))  # ,allpasses
        print("PASSES", len(passes))  # ,passes
        print("CONFLGRPS", len(cpg))  # ,cpg
        print("MAX", max([len(g) for g in cpg]))

        combined_stations(opts, pattern, station_list, graph, allpasses,
                          start_time, start, forward)

    except Exception:
        logger.exception("Something wrong happened!")
        raise
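
conflicting_passes, imported from trollsched.schedule above the statistics block, buckets a risetime-sorted pass list into groups of mutually overlapping passes; get_non_conflicting_groups then splits each group into its conflict-free subsets. The implementation is not part of this listing; a minimal sketch of the grouping idea, assuming each pass object exposes risetime and falltime attributes as the callers above do:

from datetime import timedelta

def conflicting_passes(passes, delay=timedelta(seconds=0)):
    """Group a risetime-sorted pass list into lists of overlapping passes.

    Sketch only: classic interval grouping, not the project's exact code.
    """
    groups = [[passes[0]]]
    group_end = passes[0].falltime + delay
    for overpass in passes[1:]:
        if overpass.risetime <= group_end:  # still inside the running group
            groups[-1].append(overpass)
            group_end = max(group_end, overpass.falltime + delay)
        else:  # gap found: open a new group
            groups.append([overpass])
            group_end = overpass.falltime + delay
    return groups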
Code example #6
File: combine.py Project: adybbroe/pytroll-schedule
def add_graphs(graphs, passes, delay=timedelta(seconds=0)):
    """Add all graphs to one combined graph. """
    statlst = graphs.keys()

    def count_neq_passes(pl):
        """Counts how many satellite passes in a list are really distinct (satellite/epoch)."""
        if len(pl):
            r = []
            s = 1
            for q in pl[1:]:
                if pl[0] != q:
                    r.append(q)
            s += count_neq_passes(r)
            return s
        else:
            return 0

    for s, g in graphs.items():
        logger.debug("station: %s, order: %d", s, g.order)

    # Graphs and allpasses are mappings (of sets or similar), but we need
    # lists of lists, so they are copied into grl and pl here.
    grl = []
    pl = []
    for s in statlst:
        grl.append(graphs[s])
        pl.append(sorted(passes[s], key=lambda x: x.risetime))

    # Rough estimate for the size of the combined passes' graph.
    n_vertices = 1
    for g in grl:
        n_vertices += g.order
    n_vertices *= len(statlst) * 2
    newgraph = Graph(n_vertices=n_vertices)

    logger.debug("newgraph order: %d", newgraph.order)

    # This value signals the end; it is reached when no more passes from any antenna are available.
    stopper = tuple((None, None) for s in range(len(statlst)))

    # The new passes list; it'll be filled with tuples, each with one pass per antenna.
    # It's initialized with the first passes.
    #
    # TODO: ideally something like the next line, but this doesn't work
    # faultlessly if one or more stations have multiple "first passes":
    # newpasses = [tuple((pl[s][p - 1], None) for s in range(len(statlst)) for p in grl[s].neighbours(0))]
    #
    # TODO: not "just the first vertix" with the line:
    # parlist = [newpasses[0]]
    #
    newpasses = [
        tuple((pl[s][grl[s].neighbours(0)[0] - 1], None)
              for s in range(len(statlst)))
    ]
    parlist = [newpasses[0]]
    while len(parlist):
        newparlist = []
        for parnode in parlist:
            if parnode == stopper:
                # All antennas reached the end of passes list in this path of
                # possibilities.
                # stopper == ((None,None) * stations)
                #
                # If this happens for all elements of parlist, newparlist will
                # stay empty and (at the bottom of this loop) replace parlist,
                # which as an empty list will cause the surrounding while-loop
                # to end.
                continue

            collected_newnodes = collect_nodes(0, parnode, grl, newgraph,
                                               newpasses, pl, delay)

            for newnode_list in collected_newnodes:
                newnode = tuple(newnode_list)
                if newnode not in newpasses:
                    newpasses.append(newnode)

                if newnode not in newparlist:
                    newparlist.append(newnode)

                # Collecting the weights from each station's weight-matrix ...
                # (could be more compact if it weren't for the None-values)
                wl = []
                for s, p, n in zip(range(len(statlst)), parnode, newnode):
                    try:
                        if n[0] is None:
                            wl.append(0)
                        else:
                            wl.append(n[1] or grl[s].weight(
                                pl[s].index(p[0]) + 1, pl[s].index(n[0]) + 1))
                    except Exception:
                        logger.error(
                            "Collecting weights: stat %d - parnode %s %s - newnode %s %s",
                            s,
                            parnode,
                            p,
                            newnode,
                            n,
                            exc_info=1)
                        raise
                # Apply the vertex count to the sum of collected weights.
                # Vertex count: the number of vertices referring to the same
                # satellite pass; the exponent can come out as 0, 1 or 2.
                w = sum(wl) / 2**(
                    (2 * len(parnode)) - count_neq_passes(parnode) -
                    count_neq_passes(newnode))

                # TODO: if the starting point isn't "just the first vertex",
                # the comparison must be changed
                if parnode == newpasses[0]:
                    # "virtual" weight for the starting point.
                    newgraph.add_arc(0, newpasses.index(parnode) + 1, w)

                newgraph.add_arc(
                    newpasses.index(parnode) + 1,
                    newpasses.index(newnode) + 1, w)

        parlist = newparlist

    logger.debug("newpasses length: %d", len(newpasses))

    return statlst, newgraph, newpasses
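
The normalisation above divides the summed weights by a power of two that counts how many entries of parnode and newnode refer to the same satellite pass: with two stations, (2 * len(parnode)) - count_neq_passes(parnode) - count_neq_passes(newnode) is 0 when all entries are distinct and grows as stations share passes. A small worked check of count_neq_passes, with plain strings standing in for the (pass, weight) tuples (hypothetical values, for illustration only):

def count_neq_passes(pl):
    """Count the distinct values in a sequence, as in add_graphs above."""
    if len(pl):
        r = [q for q in pl[1:] if q != pl[0]]
        return 1 + count_neq_passes(r)
    return 0

parnode = ("pass_A", "pass_A")  # both stations track the same pass
newnode = ("pass_B", "pass_C")  # the follow-up passes are distinct
exponent = 2 * len(parnode) - count_neq_passes(parnode) - count_neq_passes(newnode)
print(exponent)  # 4 - 1 - 2 = 1, so the summed weight is halved once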
Code example #7
File: combine.py Project: pytroll/pytroll-schedule
def add_graphs(graphs, passes, delay=timedelta(seconds=0)):
    """Add all graphs to one combined graph. """
    statlst = graphs.keys()

    def count_neq_passes(pl):
        """Counts how many satellite passes in a list are really distinct (satellite/epoch)."""
        if len(pl):
            r = []
            s = 1
            for q in pl[1:]:
                if pl[0] != q:
                    r.append(q)
            s += count_neq_passes(r)
            return s
        else:
            return 0

    for s, g in graphs.items():
        logger.debug("station: %s, order: %d", s, g.order)

    # Graphs and allpasses are mappings (of sets or similar), but we need
    # lists of lists, so they are copied into grl and pl here.
    grl = []
    pl = []
    for s in statlst:
        grl.append(graphs[s])
        pl.append(sorted(passes[s], key=lambda x: x.risetime))

    # Rough estimate for the size of the combined passes' graph.
    n_vertices = 1
    for g in grl:
        n_vertices += g.order
    n_vertices *= len(statlst) * 2
    newgraph = Graph(n_vertices=n_vertices)

    logger.debug("newgraph order: %d", newgraph.order)

    # This value signals the end; it is reached when no more passes from any antenna are available.
    stopper = tuple((None, None) for s in range(len(statlst)))

    # The new passes list; it'll be filled with tuples, each with one pass per antenna.
    # It's initialized with the first passes.
    #
    # TODO: ideally something like the next line, but this doesn't work
    # faultlessly if one or more stations have multiple "first passes":
    # newpasses = [tuple((pl[s][p - 1], None) for s in range(len(statlst)) for p in grl[s].neighbours(0))]
    #
    # TODO: not "just the first vertix" with the line:
    # parlist = [newpasses[0]]
    #
    newpasses = [tuple((pl[s][grl[s].neighbours(0)[0] - 1], None) for s in range(len(statlst)))]
    parlist = [newpasses[0]]
    while len(parlist):
        newparlist = []
        for parnode in parlist:
            if parnode == stopper:
                # All antennas reached the end of passes list in this path of
                # possibilities.
                # stopper == ((None,None) * stations)
                #
                # If this happens for all elements of parlist, newparlist will
                # stay empty and (at the bottom of this loop) replace parlist,
                # which as an empty list will cause the surrounding while-loop
                # to end.
                continue

            collected_newnodes = collect_nodes(0, parnode, grl, newgraph, newpasses, pl, delay)

            for newnode_list in collected_newnodes:
                newnode = tuple(newnode_list)
                if newnode not in newpasses:
                    newpasses.append(newnode)

                if newnode not in newparlist:
                    newparlist.append(newnode)

                # Collecting the weights from each station's weight-matrix ...
                # (could be more compact if it weren't for the None-values)
                wl = []
                for s, p, n in zip(range(len(statlst)), parnode, newnode):
                    try:
                        if n[0] is None:
                            wl.append(0)
                        else:
                            wl.append(n[1] or grl[s].weight(pl[s].index(p[0]) + 1, pl[s].index(n[0]) + 1))
                    except Exception:
                        logger.error(
                            "Collecting weights: stat %d - parnode %s %s - newnode %s %s", s, parnode, p, newnode, n, exc_info=1)
                        raise
                # Apply the vertex count to the sum of collected weights.
                # Vertex count: the number of vertices referring to the same
                # satellite pass; the exponent can come out as 0, 1 or 2.
                w = sum(wl) / 2 ** ((2 * len(parnode)) - count_neq_passes(parnode) - count_neq_passes(newnode))

                # TODO: if the starting point isn't "just the first vertex",
                # the comparison must be changed
                if parnode == newpasses[0]:
                    # "virtual" weight for the starting point.
                    newgraph.add_arc(0, newpasses.index(parnode) + 1, w)

                newgraph.add_arc(newpasses.index(parnode) + 1, newpasses.index(newnode) + 1, w)

        parlist = newparlist

    logger.debug("newpasses length: %d", len(newpasses))

    return statlst, newgraph, newpasses