예제 #1
0
파일: 3_5.py 프로젝트: koucs/ctci
    def test_1(self):
        """Verify FIFO ordering: items are dequeued in the order enqueued."""
        q = MyQueue()
        for value in (1, 2, 3, 4):
            q.enqueue(value)

        for expected in (1, 2, 3, 4):
            self.assertEqual(expected, q.dequeue())
예제 #2
0
 def test_queue_ops(self):
     """Check that the first element enqueued is the first dequeued (FIFO),
     and that dequeueing an empty queue yields None.
     """
     items = LinkedList()
     for word in ("hola", "jorge", "como"):
         enqueue(items, word)
     drained = [dequeue(items) for _ in range(4)]
     self.assertEqual(drained, ["hola", "jorge", "como", None])
예제 #3
0
def go(num_pages_to_crawl, course_map_filename, index_filename):
    '''
    Crawl the college catalog and generate a CSV file with an index.

    Inputs:
        num_pages_to_crawl: the number of pages to process during the crawl
        course_map_filename: the name of a JSON file that contains the
          mapping of course codes to course identifiers
        index_filename: the name for the CSV of the index.

    Outputs:
        CSV file of the index
    '''
    starting_url = ("http://www.classes.cs.uchicago.edu/archive/2015/winter"
                    "/12200-1/new.collegecatalog.uchicago.edu/index.html")
    limiting_domain = "classes.cs.uchicago.edu"

    to_visit = Queue()
    course_dict = {}
    analyze_page(starting_url, to_visit, limiting_domain, course_dict)

    # Process at most num_pages_to_crawl queued pages beyond the start page.
    for _ in range(num_pages_to_crawl):
        if to_visit.isEmpty():
            break
        next_page = to_visit.dequeue()
        analyze_page(next_page, to_visit, limiting_domain, course_dict)

    convert_to_csv(course_dict)
예제 #4
0
 def breadth_first(self):
     """Traverse the tree level by level, printing each node's key."""
     pending = Queue.Queue(self)

     while not pending.isEmpty():
         current = pending.dequeue()
         print(str(current.key))
         # Schedule children left-to-right so siblings print in order.
         if current.has_lchild():
             pending.enqueue(current.lchild)
         if current.has_rchild():
             pending.enqueue(current.rchild)
예제 #5
0
def breadth_first_order(tnode):
    """
    Display the nodes of a tree in breadth-first-order.

    :param tnode: a primitive tree
    :return: nothing
    """
    pending = Queue.create()
    Queue.enqueue(pending, tnode)
    collected = Queue.create()

    # First pass: BFS over the tree, queueing each node's data in visit
    # order; None children are enqueued and skipped when dequeued.
    while Queue.size(pending) > 0:
        node = Queue.dequeue(pending)
        if node is not None:
            Queue.enqueue(collected, tn.get_data(node))
            Queue.enqueue(pending, tn.get_left(node))
            Queue.enqueue(pending, tn.get_right(node))

    # Second pass: print the collected data on one line.
    while not Queue.is_empty(collected):
        print(Queue.dequeue(collected), end=" ")
예제 #6
0
 def layerEgrodic(self):
     """Return the keys of the tree rooted at self.root in level order (BFS).

     Bug fix: the original imported the stdlib ``queue`` module and called
     ``enqueue``/``dequeue``/``size`` on ``queue.Queue``, which only provides
     ``put``/``get``/``qsize`` -- every call raised AttributeError. A
     ``collections.deque`` gives O(1) FIFO behavior with no thread overhead.

     :return: list of node keys, top level first, left to right.
     """
     self.ls = []
     if self.root is None:
         return self.ls
     from collections import deque
     pending = deque([self.root])
     while pending:
         curr_node = pending.popleft()
         self.ls.append(curr_node.key)
         if curr_node.left is not None:
             pending.append(curr_node.left)
         if curr_node.right is not None:
             pending.append(curr_node.right)
     return self.ls
예제 #7
0
    def test_queue_usage(self):
        """Exercise enqueue/dequeue interleaving and the empty-dequeue error."""
        q = QueueTwoStacks()

        for item in (1, 2, 3):
            q.enqueue(item)

        self.assertEqual(q.dequeue(), 1)
        self.assertEqual(q.dequeue(), 2)

        # Enqueue after a partial drain: order must still be FIFO.
        q.enqueue(4)

        self.assertEqual(q.dequeue(), 3)
        self.assertEqual(q.dequeue(), 4)

        # Dequeuing from an empty queue must raise.
        with self.assertRaises(Exception):
            q.dequeue()
예제 #8
0
 def levelByLevel(self, aFile):
     """Write the nodes of the BTree to aFile, level by level."""
     aFile.write("A level-by-level listing of the nodes:\n")
     pending = MyQueue()
     pending.enqueue(self.rootNode)
     while not pending.isEmpty():
         node = pending.dequeue()
         aFile.write(str(node))
         # A BTree node holds one more child pointer than it holds keys.
         for slot in range(node.getNumberOfKeys() + 1):
             child = self.readFrom(node.child[slot])
             if child is not None:
                 pending.enqueue(child)
예제 #9
0
def findRouteBFS(root, node):
    """Breadth-first search from root; return True iff node is reachable."""
    pending = Queue()
    root.visited = True
    pending.enqueue(root)
    while not pending.isEmpty():
        current = pending.dequeue()
        if current == node:
            return True
        # Schedule unvisited children, marking them at discovery time.
        for child in current.children:
            if child.visited != True:
                child.visited = True
                pending.enqueue(child)
    return False
def thread(thread_name, index):
    """Pull one job off the shared queue, execute it, and record its status."""
    raw = q.dequeue()
    if not raw:
        return

    job = json.loads(raw)

    # Mark the job as picked up before running it.
    job["status"] = "in-progress"
    job["updated_at"] = str(datetime.datetime.now())
    update_job_status(json.dumps(job))

    exit_code = execute(job)

    # Record the final outcome based on the exit code.
    job["status"] = "completed" if exit_code == 0 else "failed"
    job["updated_at"] = str(datetime.datetime.now())
    update_job_status(json.dumps(job))
def scheduling(listTask, time):
    """Round-robin CPU scheduling of listTask with quantum `time`.

    listTask maps a task name to a list whose index 1 is the remaining
    burst time and index 2 receives the completion time. Returns the
    mutated listTask.
    """
    print(f"waktu proses cpu = {time}")
    print(f"antrian proses : {listTask.values()}")
    iteration = 1
    elapsed = 0
    ready = qq.createQueue()
    for task_name in listTask:
        qq.enqueue(ready, task_name)

    while not qq.isEmpty(ready):
        print(f"iterasi ke - {iteration}")
        iteration += 1

        name = qq.dequeue(ready)
        value = listTask[name][1]
        print(
            f"proses {name} sedang dikerjakan, sisa waktu proses {name} = {value}"
        )

        if value > time:
            # Not finished: consume a full quantum and requeue the task.
            elapsed += time
            listTask[name][1] = value - time
            qq.enqueue(ready, name)
            print(f"antrian data tersisa : {ready}")
            print(f"sisa task {listTask}")
        else:
            # Finished within this quantum: record the completion time.
            elapsed += value
            listTask[name][1] = 0
            listTask[name][2] = elapsed
            print(f"proses {name} selesai")
            print(f"antrian data tersisa : {ready}")
            print(f"sisa task {listTask}")

    return listTask
예제 #12
0
def fcfs():
    """First-come-first-served worker loop: poll the job queue every 5s."""
    while True:
        raw = q.dequeue()

        if raw:
            job = json.loads(raw)

            # Flag the job as started before executing it.
            job["status"] = "in-progress"
            job["updated_at"] = str(datetime.datetime.now())
            update_job_status(json.dumps(job))

            exit_code = execute(job)

            job["status"] = "completed" if exit_code == 0 else "failed"
            job["updated_at"] = str(datetime.datetime.now())
            update_job_status(json.dumps(job))

        time.sleep(5)
예제 #13
0
def bfs(maze, queue):
    """Breadth-first search of a square maze from maze[0][0] toward (9, 9).

    maze is a square grid of cell objects carrying .x, .y, .wall, .searched
    and .parent attributes; queue supplies enqueue/dequeue/isEmpty.
    Returns True as soon as a neighbor of the frontier is the goal, else
    False once every reachable cell has been explored.

    Fix: cells are now marked `searched` when *enqueued* rather than when
    dequeued. Previously a cell reachable from several frontier cells was
    enqueued once per discoverer, wasting work and overwriting .parent.
    """
    length = len(maze)
    goal = (9, 9)  # NOTE(review): hard-coded; presumably should be (length-1, length-1) -- confirm with callers

    start = maze[0][0]
    start.searched = True
    queue.enqueue(start)

    while not queue.isEmpty():
        node = queue.dequeue()
        # Explore the four orthogonal neighbors.
        for x, y in ((node.x + 1, node.y), (node.x - 1, node.y),
                     (node.x, node.y - 1), (node.x, node.y + 1)):
            if (0 <= x < length and 0 <= y < length
                    and not maze[x][y].wall and not maze[x][y].searched):
                if (x, y) == goal:
                    return True
                maze[x][y].parent = node
                maze[x][y].searched = True
                queue.enqueue(maze[x][y])
    return False
예제 #14
0
    def bfs(self, s):
        """Breadth-first search from the source vertex named `s` (CLRS-style).

        Populates self.color, self.d (hop distance from s, -1 meaning
        unreachable/infinity) and self.pi (predecessor vertex name, "NIL"
        for none), then prints one summary line per vertex.

        Fix: discovered vertices were colored "GREY" while the source was
        colored "GRAY"; the spelling is now consistently "GRAY" so any
        later comparison against the gray state cannot silently miss.
        """
        print("From bfs(self, s):")
        assert (s in self.vertex_names)
        s_ind = self.vertex_names.index(s)

        n = len(self.vertex_names)
        self.color = ["NIL"] * n
        self.d = [-1] * n
        self.pi = ["NIL"] * n

        # Initialize every vertex except the source as undiscovered.
        for i, u in enumerate(self.vertex_names):
            if u != s:
                self.color[i] = "WHITE"
                self.d[i] = -1  # stands in for infinity
                self.pi[i] = "NIL"

        self.color[s_ind] = "GRAY"
        self.d[s_ind] = 0
        self.pi[s_ind] = "NIL"
        Q = []
        enqueue(Q, s)

        while len(Q) != 0:
            u = dequeue(Q)
            u_ind = self.vertex_names.index(u)
            for v in self.vertices[u_ind].adj:
                v_ind = self.vertex_names.index(v)
                if self.color[v_ind] == "WHITE":
                    self.color[v_ind] = "GRAY"
                    self.d[v_ind] = self.d[u_ind] + 1
                    self.pi[v_ind] = u
                    enqueue(Q, v)
            self.color[u_ind] = "BLACK"

        for i, v in enumerate(self.vertex_names):
            print("v: " + str(v) + " d: " + str(self.d[i]) + " p: " +
                  str(self.pi[i]))
        print("")
예제 #15
0
def main():
    """WindNinja server wrapper entry point.

    Reads a job id from the command line, runs the WindNinja CLI for that
    job, converts the CLI outputs into the requested products (geojson
    vectors, raster tiles, clustered vectors, TopoFire basemap tiles),
    updates the job record throughout, sends the notification email, and
    finally removes the job from the queue.

    Fixes applied: `covnerted` -> `converted` typo (NameError whenever the
    raster product had to compute the weather max speed), Python 3-safe
    indexing of `wn_infos.values()`, the garbled "job update failed n
    failed" log message, a bare `except` narrowed to `except Exception`,
    and the local `id` renamed so it no longer shadows the builtin.
    """
    logging.debug("windninja.main()")  #NOTE: THIS DEBUG STATEMENT WILL NEVER GET INTO THE LOG FILE BUT WILL OUTPUT TO STDOUT
    start = datetime.datetime.now()

    # argument parsing
    parser = argparse.ArgumentParser(description="WindNinja Server Wrapper")
    parser.add_argument("id", help="id of the windninja run")
    parser.add_argument("-l", "--log_level", choices=["debug", "info", "warn", "none"], default="none", help="Logging level")
    parser.add_argument("-p", "--pretty_print", action='store_true', help="Pretty print job file")

    #---------------------------------------------------------------------------------
    #IMPORTANT: if args are bad, process will exit without much in the way of logging
    #   so when run from queue or web be sure to validate command line is correctly
    #   formatted...
    #TODO: create custom parser that logs command line errors to file
    #---------------------------------------------------------------------------------
    args = parser.parse_args()
    logging.debug(str(args))

    project = None
    status = JobStatus.failed
    msg = None

    try:

        run_id = args.id.replace("-", "")  # renamed from `id` to avoid shadowing the builtin
        project_path = os.path.join(CONFIG.JOBS_DIRECTORY, run_id)

        log_level = getattr(logging, args.log_level.upper(), 0)
        if log_level:
            logger.enable_file(project_path, log_level)

        #-----------------------------------------------------------------------
        #IMPORTANT:  FAILURES BEFORE THIS POINT WILL NOT BE LOGGED TO TEXT FILE
        #-----------------------------------------------------------------------

        logging.info("Begin - version {}".format(VERSION))
        logging.debug("project path: {}".format(project_path))

        project = Project(project_path)
        project.pretty_print = args.pretty_print
        project.openJob()

        if project is None or project.job is None or project.error is not None:
            logging.error("Exiting: Unable to open project file: {}".format(project.error))
            project = None
        elif project.job["status"] != JobStatus.new.name:
            logging.error("Exiting: Project is not NEW: status={}".format(project.job["status"]))
            project = None
        else:
            project.updateJob(JobStatus.executing.name, (logging.INFO, "Initializing WindNinja Run" ), True)

            # create the cli output folder
            wncli_folder = os.path.join(project_path, "wncli")
            os.makedirs(wncli_folder)

            result = createDem(project.bbox, wncli_folder)
            if result[0]:
                project.demPath = result[1]
                project.updateJob(None, (logging.INFO, "DEM created"), True)

                # execute the cli
                override_args = {ptr.split(":")[0]: ptr.split(":")[1] for ptr in project.parameters.split(";")}
                #TODO: rethink "products"
                output_shp = project.products.get("vector", False)
                output_asc = project.products.get("clustered", False)
                output_wx = project.products.get("weather", False)

                result = execute_wncli(wncli_folder, override_args, project.demPath, project.forecast, output_shp, output_asc, output_wx)

                #result:
                # 0 : status [True | False]
                # 1 : output_folder | error message [string]
                # 2 : simulations [list of datetime]
                # 3 : windninja_shapefiles [list of string]
                # 4 : windninja_ascfiles [list of string]
                # 5 : weather_shapefiles [list of string]

                if result[0]:
                    project.updateJob(None, (logging.INFO, "WindNinjaCLI executed"), True)
                    results_folder = result[1]

                    # add the simulation times/zone info
                    simulations = result[2]
                    simulations.sort()

                    # initialize some variables used across products
                    wx_infos = wn_infos = None
                    wx_max_speed = wn_max_speed = 0

                    project.output = {
                        "simulations" : {
                            "times" : ["{:%Y%m%dT%H%M}".format(d) for d in simulations],
                            "utcOffset" : "{:%z}".format(result[2][0])
                        }
                    }

                    # generate the desired output products

                    # weather results as geojson vectors
                    #TODO: even though the wx data is small (a few hundred points) if it was aggregated to
                    #       a single file it might help with performance... and size could be reduced if
                    #       using a denormalized format - the geometry json is approx 1/2 the file size.
                    #
                    if project.products.get("weather",False):
                        converted_weather = processShapefiles(results_folder, result[5], project.path, True, where="speed>0", zip_name="wx_geojson.zip")
                        if converted_weather[0]:
                            project.updateJob(None, (logging.INFO, "Weather converted to geojson"), True)
                            wx_infos = converted_weather[2]
                            wx_max_speed = converted_weather[3]
                            output = project.output["weather"] = {
                                "name": "Weather Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_weather[4]),
                                "files": converted_weather[1],
                                "data": {
                                    "maxSpeed": {
                                       "overall": wx_max_speed
                                    }
                                }
                            }
                            for i in wx_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wx_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_weather[1]), True)

                    # windninja results as geojson vectors
                    if project.products.get("vector",False):
                        converted_windninja = processShapefiles(results_folder, result[3], project.path, True, zip_name="wn_geojson.zip")
                        if converted_windninja[0]:
                            project.updateJob(None, (logging.INFO, "Output converted to geojson"), True)
                            wn_infos = converted_windninja[2]
                            wn_max_speed = converted_windninja[3]
                            output = project.output["vector"] = {
                                "name": "WindNinja Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_windninja[4]),
                                "files": converted_windninja[1],
                                "data": {
                                    "maxSpeed": {
                                        "overall": wn_max_speed
                                    }
                                }
                            }
                            for i in wn_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wn_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_windninja[1]), True)

                    # topofire tiles
                    #TODO: this one could be kicked off in a parallel process as it doesn't rely on the WN output
                    #TODO: Tile fetch is surprisingly quick but could create a local cache that is auto built as requests come in.
                    if project.products.get("topofire", False):
                        from tilegrabber import grab_tiles
                        topofire_zip_file = grab_tiles(project.bbox, project.path, "topofire")

                        if topofire_zip_file:
                            project.updateJob(None, (logging.INFO, "TopoFire tiles compiled"), True)

                            project.output["topofire"] = {
                                "name": "TopoFire Basemap",
                                "type": "basemap",
                                "format": "tiles",
                                "package": os.path.basename(topofire_zip_file),
                                "files": []
                            }
                        else:
                            project.updateJob(None, (logging.WARNING, "TopoFire tiles unavailable"), True)

                    # windninja results as tile packages
                    if project.products.get("raster", False):
                        from rastertilemaker import make_tiles_for_output
                        wx_shpfiles = result[5]
                        wn_shpfiles = result[3]

                        # calculate values if needed
                        if not wx_infos:
                            converted = processShapefiles(results_folder, wx_shpfiles, project.path, False)
                            wx_infos = converted[2]
                            wx_max_speed = converted[3]  # fixed: was `covnerted[3]` (NameError)

                        if not wn_infos:
                            converted = processShapefiles(results_folder,wn_shpfiles, project.path, False)
                            wn_infos = converted[2]
                            wn_max_speed = converted[3]

                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed

                        #NOTE: weather points NOT drawn in tiles, but max speed maybe from weather ....
                        #TODO: should this return an error/status?
                        tile_zip = make_tiles_for_output(project.path, (results_folder, wn_shpfiles), (wn_infos, max_speed), project.forecast)
                        project.updateJob(None, (logging.INFO, "Output converted to raster tiles"), True)

                        output = project.output["raster"] = {
                            "name": "WindNinja Raster Tiles",
                            "type": "raster",
                            "format": "tiles",
                            "package": tile_zip,
                            "files": [k.replace(".shp", "") for k in wn_infos.keys()],
                            "data": {
                                "maxSpeed": {
                                    "overall": max_speed
                                }
                            }
                        }

                        for i in wn_infos:
                            name = i.replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # windninja results as custom clustered format
                    if project.products.get("clustered", False):
                        from convolve import createClusters

                        # run calculation if not already done
                        if not wn_infos:
                            wn_infos = {}
                            for f in [a for a in result[4] if a.find("vel") > 0 ]:
                                wn_infos[f] = getRasterInfo(os.path.join(results_folder, f))

                            wn_max_speed = sorted(wn_infos.values(), key=lambda x: x["max"], reverse=True)[0]["max"]

                        #NOTE: assumes weather max will be covered if created
                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed

                        #TODO: should this return a status/error
                        # fixed: dict views are not indexable on Python 3, so materialize first
                        native_wkid = int(list(wn_infos.values())[0]["native_wkid"])
                        file_format = "json"
                        clustered_file, breakdown = createClusters(results_folder, project.path, "wn_clustered", native_wkid, separate=False, given_max_vel=max_speed, format=file_format)
                        project.updateJob(None, (logging.INFO, "Output converted to cluster"), True)

                        #TODO: zip file
                        zip_name="wn_clustered.zip"
                        zip_files(os.path.join(project.path, zip_name), [os.path.join(project.path, f) for f in clustered_file])

                        output = project.output["clustered"] = {
                            "name": "WindNinja Cluster Vectors",
                            "type": "cluster",
                            "format": file_format,
                            "baseUrl": "",
                            "package": zip_name,
                            "files": clustered_file,
                            "data":  {
                                "maxSpeed": {
                                    "overall": wn_max_speed
                                },
                                "speedBreaks": breakdown
                            }
                        }
                        for i in wn_infos:
                            name = i.replace("_vel.asc", "").replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # processing complete!
                    status = JobStatus.succeeded
                else:
                    project.updateJob(None, (logging.ERROR, result[1]), True)

            else:
                project.updateJob(None, (logging.ERROR, result[1]), True)

    except Exception as e:
        try:
            msg = str(e).replace("\n", " ")
            if project is not None:
                project.updateJob(None, (logging.ERROR, msg), True)
            else:
                logging.error(msg)
        except Exception:
            # best-effort: never let error reporting mask the original failure
            pass

    finish = datetime.datetime.now()
    delta = finish - start

    if project is not None:
        try:
            msg = "Complete - total processing: {}".format(delta)
            project.updateJob(status.name, (logging.INFO, msg), True)
        except Exception as ex:
            logging.error("job update failed:\t{}".format(str(ex)))  # fixed garbled message

        try: project.sendEmail()
        except Exception as ex:
            logging.error("send notification failed:\t{}".format(str(ex)))

    #TODO: should this be a command line flag to skip or try
    try:
        dequeue(args.id)
        logging.info("Job dequeue")
    except Exception as ex:
        logging.error("job dequeue failed:\t{}".format(str(ex)))
예제 #16
0
def main(origin, start, color, obj, avoid, held):
    """Decide the wheel's next goal and plan a path toward it.

    origin - which corner is the origin - used to create the grid
    start  - coordinates we are currently in
    color  - section color we are currently in
    obj    - goal objects within view
    avoid  - objects to steer around (walls, spacetels)
    held   - what the wheel currently holds

    Returns a (frenet path, goal) pair; goal is None when exploring.
    """
    goals_in_view = obj
    obstacles = avoid
    carried = held
    curr_state = State(location=start, currSection=color, listOfObj=goals_in_view)
    curr_state.checkSensors()

    # Establish the grid from the chosen origin corner.
    field = Grid(origin)

    # Decide whether sorting what we carry is worthwhile.
    sort_goal = 0
    if carried:
        distances = [(item, field.dist(item)) for item in carried]
        nearest = min(distances, key=lambda pair: pair[1])
        sort_goal = Goal(nearest[0], nearest[1], pickup=False)
        if len(distances) == 4:
            # Holding a full load: sorting overrides everything else.
            sort_goal.priority = math.inf
            return frenet(sort_goal.location, obstacles), sort_goal
        sort_goal.generateRound1Priority()

    # Queue up pickup goals (objects currently only have coordinates, no color).
    for goal in goals_in_view:
        candidate = Goal(color=goal[0], location=goal[1], pickup=True)
        if Round == 1:
            candidate.generateRound1Priority()
        else:
            candidate.generateRound2Priority()
        if not queue.update(candidate):
            queue.enqueue(candidate)
    next_goal = queue.dequeue()

    if not next_goal and not sort_goal:
        # Nothing exists to do: explore by reflecting our position about
        # the centerpiece at (4.5, 4.5) and planning a path there.
        if (4.5 - curr_state.location[0] > 0):
            new_x = 4.5 + abs(4.5 - curr_state.location[0])
        else:
            new_x = 4.5 - abs(4.5 - curr_state.location[0])
        if (4.5 - curr_state.location[1] > 0):
            new_y = 4.5 + abs(4.5 - curr_state.location[1])
        else:
            new_y = 4.5 - abs(4.5 - curr_state.location[1])
        return frenet(((new_x, new_y), obstacles)), None
    if not next_goal:
        # No objects to pick up: just sort what we have.
        return frenet(sort_goal.location, obstacles), sort_goal
    if not sort_goal:
        # Nothing to sort: just pick anything up.
        return frenet(next_goal.location, obstacles), next_goal
    # Both options available: take whichever has the higher priority.
    chosen = max([sort_goal, next_goal], key=lambda g: g.priority)
    return frenet(chosen.location, obstacles), chosen
예제 #17
0
# Exercise the bounded queue implementation: fill it, confirm rejection
# when full, drain part of it, peek, refill, and bulk-enqueue.
print("Adding 10 values to queue")
for value in range(10):
    queue.enqueue(value)

print("The queue should now be: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]")
print("Your queue is:", queue.getlist())

# The queue is at capacity, so this enqueue should be refused.
print("Trying to add 'cow' to queue")
result = queue.enqueue('cow')
print("The return value of that call to enqueue should be False")
print("The returned value from your queue was", result)
print("Printing the queue, should be: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]")
print("Your queue is:", queue.getlist())

print("Removing and printing 4 values from queue")
print("Should be 0:", queue.dequeue())
print("Should be 1:", queue.dequeue())
print("Should be 2:", queue.dequeue())
print("Should be 3:", queue.dequeue())

print("Looking at the first value, but not removing")
print("Should be 4:", queue.peek())

# With room available again, the same item should now be accepted.
print("Trying to add 'cow' to queue")
queue.enqueue('cow')
print("The queue should now be: [4, 5, 6, 7, 8, 9, 'cow']")
print("Your queue is:", queue.getlist())

# Bulk enqueue: only as many items as fit should be added.
print("Trying to enqueue the list ['a', 'b', 'c', 'd', 'e']")
added = queue.multienqueue(['a', 'b', 'c', 'd', 'e'])
print("The number added should be 3.")
예제 #18
0
def main():
    """WindNinja Server job wrapper entry point.

    Reads a job id from the command line, opens the corresponding project,
    runs the WindNinja CLI, converts the raw results into the requested
    output products (geojson vectors, raster tiles, clustered output,
    TopoFire basemap tiles, weather vectors), records progress and errors
    on the job, sends a notification email, and finally removes the job
    from the queue.

    Side effects only (filesystem, logging, email, queue); returns None.
    """
    logging.debug("windninja.main()")  #NOTE: THIS DEBUG STATEMENT WILL NEVER GET INTO THE LOG FILE BUT WILL OUTPUT TO STDOUT
    start = datetime.datetime.now()

    # argument parsing
    parser = argparse.ArgumentParser(description="WindNinja Server Wrapper")
    parser.add_argument("id", help="id of the windninja run")
    parser.add_argument("-l", "--log_level", choices=["debug", "info", "warn", "none"], default="none", help="Logging level")
    parser.add_argument("-p", "--pretty_print", action='store_true', help="Pretty print job file")

    #---------------------------------------------------------------------------------
    #IMPORTANT: if args are bad, process will exit without much in the way of logging 
    #   so when run from queue or web be sure to validate command line is correctly
    #   formatted...
    #TODO: create custome parser that logs command line errors to file
    #---------------------------------------------------------------------------------
    args = parser.parse_args()
    logging.debug(str(args))

    project = None
    status = JobStatus.failed
    msg = None

    try: 

        # Job folders are named with the dash-less form of the id.
        id = args.id.replace("-", "")
        project_path = os.path.join(CONFIG.JOBS_DIRECTORY, id)

        # getattr returns 0 for "none", which disables file logging.
        log_level = getattr(logging, args.log_level.upper(), 0)
        if log_level:
            logger.enable_file(project_path, log_level)

        #-----------------------------------------------------------------------
        #IMPORTANT:  FAILURES BEFORE THIS POINT WILL NOT BE LOGGED TO TEXT FILE
        #-----------------------------------------------------------------------

        logging.info("Begin - version {}".format(VERSION))
        logging.debug("project path: {}".format(project_path))

        project = Project(project_path)
        project.pretty_print = args.pretty_print
        project.openJob()

        # Only NEW jobs are processed; anything else is logged and dropped.
        if project is None or project.job is None or project.error is not None:
            logging.error("Exiting: Unable to open project file: {}".format(project.error))
            project = None
        elif project.job["status"] != JobStatus.new.name:
            logging.error("Exiting: Project is not NEW: status={}".format(project.job["status"]))
            project = None
        else:
            project.updateJob(JobStatus.executing.name, (logging.INFO, "Initializing WindNinja Run" ), True)

            # evaluate 'auto' forecast if necessary
            logging.debug("evaluate project forecast: {}".format(project.forecast))
            if project.forecast.lower() == "auto":
                evaluated_forecast = withinForecast(project.bbox)
                logging.debug("evaluated forecast for bbox: {}".format(evaluated_forecast))
                if evaluated_forecast:
                    project.forecast = evaluated_forecast
                    #TODO: should this new value be written back to job info
                    project.updateJob(None, (logging.INFO, "Auto Forecast Evaluated: {}".format(evaluated_forecast)), True)
                else:
                    #project.updateJob(None, (logging.ERROR, MESSAGES.BBOX_OUTSIDE_FORECASTS), True)
                    raise Exception(MESSAGES.BBOX_OUTSIDE_FORECASTS)


            # create the cli output folder
            wncli_folder = os.path.join(project_path, "wncli")
            os.makedirs(wncli_folder)

            # result: (status, dem_path | error message)
            result = createDem(project.bbox, wncli_folder)
            if result[0]:
                project.demPath = result[1]
                project.updateJob(None, (logging.INFO, "DEM created"), True)

                # execute the cli 
                # parameters string format: "key:value;key:value;..."
                override_args = {ptr.split(":")[0]: ptr.split(":")[1] for ptr in project.parameters.split(";")}
                #TODO: rethink "products" 
                output_shp = project.products.get("vector", False)
                output_asc = project.products.get("clustered", False)
                output_wx = project.products.get("weather", False)

                result = execute_wncli(wncli_folder, override_args, project.demPath, project.forecast, output_shp, output_asc, output_wx)

                #result:
                # 0 : status [True | False]
                # 1 : output_folder | error message [string]
                # 2 : simulations [list of datetime]
                # 3 : windninja_shapefiles [list of string]
                # 4 : windninja_ascfiles [list of string]
                # 5 : weather_shapefiles [list of string]

                if result[0]:
                    project.updateJob(None, (logging.INFO, "WindNinjaCLI executed"), True)
                    results_folder = result[1]

                    # add the simulation times/zone info
                    simulations = result[2]
                    simulations.sort()

                    # initialize some variables used across products
                    wx_infos = wn_infos = None
                    wx_max_speed = wn_max_speed = 0

                    project.output = {
                        "simulations" : {
                            "times" : ["{:%Y%m%dT%H%M}".format(d) for d in simulations], 
                            "utcOffset" : "{:%z}".format(result[2][0])
                        }
                    }

                    # generate the desired output products

                    # weather results as geojson vectors
                    #TODO: even though the wx data is small (a few hundred points) if it was aggregated to 
                    #       a single file it might help with performance... and size could be reduced if
                    #       using a denormalized format - the geometry json is approx 1/2 the file size.
                    #       
                    if project.products.get("weather",False):
                        converted_weather = processShapefiles(results_folder, result[5], project.path, True, where="speed>0", zip_name="wx_geojson.zip")
                        if converted_weather[0]:
                            project.updateJob(None, (logging.INFO, "Weather converted to geojson"), True)
                            wx_infos = converted_weather[2]
                            wx_max_speed = converted_weather[3]
                            output = project.output["weather"] = {
                                "name": "Weather Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_weather[4]),
                                "files": converted_weather[1],
                                "data": {
                                    "maxSpeed": {
                                       "overall": wx_max_speed
                                    }
                                }
                            }
                            # per-file max speeds, keyed by the json file name
                            for i in wx_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wx_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_weather[1]), True)

                    # windninja resutls as geojson vectors
                    if project.products.get("vector",False):
                        converted_windninja = processShapefiles(results_folder, result[3], project.path, True, zip_name="wn_geojson.zip")
                        if converted_windninja[0]:
                            project.updateJob(None, (logging.INFO, "Output converted to geojson"), True)
                            wn_infos = converted_windninja[2]
                            wn_max_speed = converted_windninja[3]
                            output = project.output["vector"] = {
                                "name": "WindNinja Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_windninja[4]),
                                "files": converted_windninja[1],
                                "data": {
                                    "maxSpeed": {
                                        "overall": wn_max_speed
                                    }
                                }
                            }
                            for i in wn_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wn_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_windninja[1]), True)

                    # topofire tiles
                    #TODO: this one could be kicked off in a parrallel process as it doesn't rely on the WN output
                    #TODO: Tile fetch is surprisingly quick but could create a local cache that is auto built as requests come in.
                    if project.products.get("topofire", False):
                        from tilegrabber import grab_tiles
                        topofire_zip_file = grab_tiles(project.bbox, project.path, "topofire")

                        if topofire_zip_file:
                            project.updateJob(None, (logging.INFO, "TopoFire tiles compiled"), True)

                            project.output["topofire"] = {
                                "name": "TopoFire Basemap",
                                "type": "basemap",
                                "format": "tiles",
                                "package": os.path.basename(topofire_zip_file),
                                "files": []
                            }
                        else:
                            # best-effort product: missing basemap is a warning, not a failure
                            project.updateJob(None, (logging.WARNING, "TopoFire tiles unavailable"), True)

                    # windninja results as tile packages
                    if project.products.get("raster", False):
                        from rastertilemaker import make_tiles_for_output
                        wn_shpfiles = result[3]

                        # calculate values if needed
                        if output_wx and not wx_infos:
                            wx_shpfiles = result[5]  # FIX: was `results[5]` (NameError)
                            converted = processShapefiles(results_folder, wx_shpfiles, project.path, False)
                            wx_max_speed = converted[3]  # FIX: was misspelled `covnerted` (NameError)

                        if not wn_infos:
                            converted = processShapefiles(results_folder,wn_shpfiles, project.path, False)
                            wn_infos = converted[2]
                            wn_max_speed = converted[3]

                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed

                        #NOTE: weather points NOT drawn in tiles, but max speed maybe from weather ....
                        #TODO: should this return an error/status?
                        tile_zip = make_tiles_for_output(project.path, (results_folder, wn_shpfiles), (wn_infos, max_speed), project.forecast)
                        project.updateJob(None, (logging.INFO, "Output converted to raster tiles"), True)

                        output = project.output["raster"] = {
                            "name": "WindNinja Raster Tiles",
                            "type": "raster",
                            "format": "tiles",
                            "package": tile_zip,
                            "files": [k.replace(".shp", "") for k in wn_infos.keys()],
                            "data": {
                                "maxSpeed": {
                                    "overall": max_speed
                                }
                            }
                        }

                        for i in wn_infos:
                            name = i.replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # windninja results as custom clustered format
                    if project.products.get("clustered", False):
                        from convolve import createClusters

                        # run calculation if not already done
                        if not wn_infos:
                            wn_infos = {}
                            # only the velocity ascii grids ("*vel*.asc") are relevant here
                            for f in [a for a in result[4] if a.find("vel") > 0 ]:
                                wn_infos[f] = getRasterInfo(os.path.join(results_folder, f))

                            wn_max_speed = sorted(wn_infos.values(), key=lambda x: x["max"], reverse=True)[0]["max"]

                        #NOTE: assumes weather max will be covered if created
                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed

                        #TODO: should this return a status/error
                        # FIX: dict.values() is a view and not subscriptable in Python 3;
                        # take the first entry via an iterator instead of values()[0].
                        native_wkid = int(next(iter(wn_infos.values()))["native_wkid"])
                        file_format = "json"
                        clustered_file, breakdown = createClusters(results_folder, project.path, "wn_clustered", native_wkid, separate=False, given_max_vel=max_speed, format=file_format)
                        project.updateJob(None, (logging.INFO, "Output converted to cluster"), True)

                        #TODO: zip file
                        zip_name="wn_clustered.zip"
                        zip_files(os.path.join(project.path, zip_name), [os.path.join(project.path, f) for f in clustered_file])

                        output = project.output["clustered"] = {
                            "name": "WindNinja Cluster Vectors",
                            "type": "cluster",
                            "format": file_format,
                            "baseUrl": "",
                            "package": zip_name,
                            "files": clustered_file,
                            "data":  {
                                "maxSpeed": {
                                    "overall": wn_max_speed
                                },
                                "speedBreaks": breakdown
                            }
                        }
                        for i in wn_infos:
                            name = i.replace("_vel.asc", "").replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # processing complete!
                    status = JobStatus.succeeded
                else:
                    # execute_wncli failed; result[1] carries the error message
                    project.updateJob(None, (logging.ERROR, result[1]), True)

            else:
                # createDem failed; result[1] carries the error message
                project.updateJob(None, (logging.ERROR, result[1]), True)

    except Exception as e:
        # Last-resort handler: record the failure on the job if possible,
        # otherwise just log it; never let cleanup below be skipped.
        try: 
            msg = str(e).replace("\n", " ")
            if project is not None:
                project.updateJob(None, (logging.ERROR, msg), True)
            else:
                logging.error(msg)
        except: pass

    finish = datetime.datetime.now()
    delta = finish - start

    if project is not None:
        try:        
            msg = "Complete - total processing: {}".format(delta)
            project.updateJob(status.name, (logging.INFO, msg), True)
        except Exception as ex:
            # FIX: log message was garbled ("job update failed n failed")
            logging.error("job update failed:\t{}".format(str(ex)))

        try: project.sendEmail()
        except Exception as ex:
            logging.error("send notification failed:\t{}".format(str(ex)))

    #TODO: should this be a command line flag to skip or try
    try:
        dequeue(args.id)
        logging.info("Job dequeue")
    except Exception as ex:
        logging.error("job dequeue failed:\t{}".format(str(ex))) 
예제 #19
0
        print("Pushing value : " + str(val))
        s.push(new_stack, val)

    print(
        "Enqueue-ing the stack containing those past 3 values onto the queue!!\n"
    )
    q.enqueue(queue, new_stack)

# --- Drain-the-queue-of-stacks script ---
# NOTE(review): `q` (queue helpers), `s` (stack helpers) and `queue` are
# defined earlier in the file; this fragment only consumes them.
print("\nDe-queue-ing the queue!\n")

# empty the queue
while not q.is_empty(queue):

    print("De-queue-ing a stack!")
    current_dequeued_stack = q.dequeue(queue)

    # Pop each dequeued stack down to empty; values come out in LIFO order.
    # NOTE(review): q.is_empty is applied to a *stack* here — presumably both
    # modules wrap the same underlying list type; confirm, or use s.is_empty.
    while not q.is_empty(current_dequeued_stack):
        print("Popped value from de-queued stack: " +
              str(s.pop(current_dequeued_stack)))

    print()  # Aesthetic

print("***********************************************")

##################################################################################
#
# A queue that enqueues queues!  #################################################
#
##################################################################################