def get_best_sched(overpasses, area_of_interest, delay, avoid_list=None):
    """Get the best schedule based on *area_of_interest*."""
    avoid_list = avoid_list or []
    passes = sorted(overpasses, key=lambda x: x.risetime)
    grs = conflicting_passes(passes, delay)
    logger.debug("conflicting %s", str(grs))
    ncgrs = [get_non_conflicting_groups(gr, delay) for gr in grs]
    logger.debug("non conflicting %s", str(ncgrs))
    n_vertices = len(passes)

    # One vertex per pass, plus a virtual start vertex (0) and a virtual end
    # vertex (n_vertices + 1).
    graph = Graph(n_vertices=n_vertices + 2)

    def add_arc(graph, p1, p2, hook=None):
        logger.debug("Adding arc between " + str(p1) +
                     " and " + str(p2) + "...")
        if p1 in avoid_list or p2 in avoid_list:
            w = 0
            logger.debug("...0 because in the avoid_list!")
        else:
            w = combine(p1, p2, area_of_interest)
        logger.debug("...with weight " + str(w))

        # Uncomment to dump the arcs in graphviz format for debugging:
        # with open("/tmp/schedule.gv", "a") as fp_:
        #     fp_.write(' "' + str(p1) + '" -> "' + str(p2) +
        #               '" [ label = "' + str(w) + '" ];\n')

        graph.add_arc(passes.index(p1) + 1,
                      passes.index(p2) + 1, w)
        if hook is not None:
            hook()

    prev = set()
    for ncgr in ncgrs:
        # Link the last passes of the previous conflict group to the first
        # passes of each non-conflicting group in the current one.
        for pr in prev:
            foll = set(gr[0] for gr in ncgr)
            for f in foll:
                add_arc(graph, pr, f)

        prev = set(sorted(gr, key=lambda x: x.falltime)[-1] for gr in ncgr)
        # Chain consecutive passes within each non-conflicting group.
        for gr in ncgr:
            if len(gr) > 1:
                for p1, p2 in zip(gr[:-1], gr[1:]):
                    add_arc(graph, p1, p2)

    # Connect the last passes to the virtual end vertex, and the virtual start
    # vertex to the first passes.
    for pr in prev:
        graph.add_arc(passes.index(pr) + 1, n_vertices + 1)
    for first in ncgrs[0][0]:
        graph.add_arc(0, passes.index(first) + 1)

    dist, path = graph.dag_longest_path(0, n_vertices + 1)
    del dist

    return [passes[idx - 1] for idx in path[1:-1]], (graph, passes)
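
# Illustration only: ``Graph.dag_longest_path`` above returns the heaviest
# chain of passes between the virtual start and end vertices. The real Graph
# class is defined elsewhere in this package; the toy helper below is a minimal
# sketch of the same idea (dynamic programming over a DAG whose vertex
# numbering is assumed to be a topological order), kept here only to document
# what the scheduler expects from that call.
def _toy_dag_longest_path(n_vertices, arcs):
    """Return (best_weight, best_path) for a DAG given as {(u, v): weight}.

    Assumes u < v for every arc, so 0 .. n_vertices - 1 is a topological order.
    This is an illustrative stand-in, not the Graph implementation used above.
    """
    best = {0: (0, [0])}
    for v in range(1, n_vertices):
        candidates = [(best[u][0] + w, best[u][1] + [v])
                      for (u, vv), w in arcs.items()
                      if vv == v and u in best]
        if candidates:
            best[v] = max(candidates)
    return best[n_vertices - 1]

# Example: 0 -> 1 -> 3 weighs 3.0 while 0 -> 2 -> 3 weighs 2.5, so
# _toy_dag_longest_path(4, {(0, 1): 1.0, (1, 3): 2.0, (0, 2): 2.0, (2, 3): 0.5})
# returns (3.0, [0, 1, 3]).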
def add_graphs(graphs, passes, delay=timedelta(seconds=0)):
    """Add all graphs to one combined graph."""
    statlst = list(graphs.keys())

    def count_neq_passes(pl):
        """Count how many satellite passes in a list are really distinct (satellite/epoch)."""
        if len(pl):
            r = []
            s = 1
            for q in pl[1:]:
                if pl[0] != q:
                    r.append(q)
            s += count_neq_passes(r)
            return s
        else:
            return 0

    for s, g in graphs.items():
        logger.debug("station: %s, order: %d", s, g.order)

    # Graphs and allpasses are dicts of sets (or similar), but we need
    # lists of lists, so they are copied here.
    grl = []
    pl = []
    for s in statlst:
        grl.append(graphs[s])
        pl.append(sorted(passes[s], key=lambda x: x.risetime))

    # Rough estimate for the size of the combined passes' graph.
    n_vertices = 1
    for g in grl:
        n_vertices += g.order
    n_vertices *= len(statlst) * 2

    newgraph = Graph(n_vertices=n_vertices)
    logger.debug("newgraph order: %d", newgraph.order)

    # This value signals the end, when no more passes from any antenna are available.
    stopper = tuple((None, None) for s in range(len(statlst)))

    # The new passes list will be filled with tuples, each holding one pass per
    # antenna. It is initialized with the first passes.
    #
    # TODO: ideally something like the next line, but this doesn't work
    # faultlessly if one or more stations have multiple "first passes":
    # newpasses = [tuple((pl[s][p - 1], None) for s in range(len(statlst)) for p in grl[s].neighbours(0))]
    #
    # TODO: don't start from "just the first vertex", as done by the line:
    # parlist = [newpasses[0]]
    #
    newpasses = [tuple((pl[s][grl[s].neighbours(0)[0] - 1], None)
                       for s in range(len(statlst)))]
    parlist = [newpasses[0]]

    while len(parlist):
        newparlist = []
        for parnode in parlist:
            if parnode == stopper:
                # All antennas reached the end of the passes list in this path
                # of possibilities.
                #
                # stopper == ((None, None) * stations)
                #
                # If this happens for all elements of parlist, newparlist will
                # stay empty and (at the bottom of this loop) replace parlist,
                # which as an empty list will cause the surrounding while-loop
                # to end.
                continue

            collected_newnodes = collect_nodes(0, parnode, grl, newgraph,
                                               newpasses, pl, delay)

            for newnode_list in collected_newnodes:
                newnode = tuple(newnode_list)
                if newnode not in newpasses:
                    newpasses.append(newnode)
                if newnode not in newparlist:
                    newparlist.append(newnode)

                # Collect the weights from each station's weight matrix ...
                # (could be more compact if it weren't for the None values).
                wl = []
                for s, p, n in zip(range(len(statlst)), parnode, newnode):
                    try:
                        if n[0] is None:
                            wl.append(0)
                        else:
                            wl.append(n[1] or grl[s].weight(pl[s].index(p[0]) + 1,
                                                            pl[s].index(n[0]) + 1))
                    except Exception:
                        logger.error(
                            "Collecting weights: stat %d - parnode %s %s - newnode %s %s",
                            s, parnode, p, newnode, n, exc_info=1)
                        raise

                # Apply the vertex count to the sum of collected weights.
                # Vertex count: number of vertices referring to the same
                # satellite pass; it can be 0, 1 or 2.
                w = sum(wl) / 2 ** ((2 * len(parnode))
                                    - count_neq_passes(parnode)
                                    - count_neq_passes(newnode))

                # TODO: if the starting point isn't "just the first vertex",
                # the comparison must be changed.
                if parnode == newpasses[0]:
                    # "Virtual" weight for the starting point.
                    newgraph.add_arc(0, newpasses.index(parnode) + 1, w)

                newgraph.add_arc(newpasses.index(parnode) + 1,
                                 newpasses.index(newnode) + 1, w)

        parlist = newparlist

    logger.debug("newpasses length: %d", len(newpasses))

    return statlst, newgraph, newpasses
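
# Illustration only: the normalisation above divides the summed station weights
# by 2 ** (2 * n_stations - distinct(parnode) - distinct(newnode)), so arcs in
# which several antennas track the same pass get a reduced weight. The toy
# helper below mirrors the recursion of ``count_neq_passes`` on plain strings,
# which stand in for the tuples handled above; it is not part of the scheduler.
def _toy_distinct_count(items):
    """Count distinct items, mirroring the recursion in count_neq_passes."""
    if not items:
        return 0
    rest = [q for q in items[1:] if q != items[0]]
    return 1 + _toy_distinct_count(rest)

# Two antennas both tracking pass "A": parnode and newnode each contain one
# distinct pass, so the exponent is 2 * 2 - 1 - 1 = 2 and the summed weight
# is divided by 4:
# _toy_distinct_count(["A", "A"])           ->  1
# sum([3.0, 3.0]) / 2 ** (2 * 2 - 1 - 1)    ->  1.5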