    def __init__(self, size=(100, 100), agents=None, tasks=None, AI_ON=True):
        self.size = size
        self.ai_on = AI_ON
        self.agents = agents if agents is not None else []
        self.tasks = tasks if tasks is not None else TaskQueue()
        self.map = np.ones(size)
        self.user_priority_queue = TaskQueue()
        self.world_task_queue = TaskQueue()
        self.generate_agents()
        self.generate_world_tasks()
        if self.ai_on:
            self.assign_tasks()

        self.ros_controller = ROSController()
        self.restart = False
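
The agents=None / tasks=None defaults above sidestep Python's mutable-default-argument pitfall: a default such as agents=[] is evaluated once, when the function is defined, so every instance created without an explicit argument would share the same list (and the same TaskQueue). A minimal illustration of the behaviour:

def append_one(items=[]):   # the default list is created only once
    items.append(1)
    return items

print(append_one())  # [1]
print(append_one())  # [1, 1] -- the same list object is reused across calls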
Example #2
 def __init__(self, num_workers=3):
     self.monitor = Monitor()
     self.google_queue = TaskQueue(-1,
                                   "Google",
                                   self.monitor,
                                   num_workers=num_workers)
     self.azure_queue = TaskQueue(-1,
                                  "Azure",
                                  self.monitor,
                                  num_workers=num_workers)
     self.amazon_queue = TaskQueue(-1,
                                   "Amazon",
                                   self.monitor,
                                   num_workers=num_workers)
     self.maxID = 1
     self.start_reassignmet()
Example #3
def init_queue():
    # Use 10 tasks as the test baseline
    for i in range(10):
        _new_task = TaskQueue()
        _new_task.set_download_url(
            "http://localhost:5000/download_sourcefile/REDEME.md")
        _new_task.set_target_node(None)
        _new_task.set_upload_url(None)
        _new_task.add()
Example #4
    def __init__(self, id, buffer=5):
        threading.Thread.__init__(self)
        self.id = id
        self.daemon = True
        self.buffer_time = buffer

        self.q = TaskQueue()
        self.last_action_time = 0
        self.new_task = threading.Event()
        self.task_done = threading.Event()
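
The snippet above only sets up the thread's state; its run() loop is not shown. As a purely hypothetical sketch, not the original implementation, the q, new_task, task_done, and buffer_time fields could cooperate like this: wait for work, throttle consecutive actions, and signal when the queue drains. The submit() helper and the assumption that tasks are callables are invented for illustration.

import queue
import threading
import time


class WorkerSketch(threading.Thread):
    """Hypothetical worker; mirrors the fields of the snippet above."""

    def __init__(self, id, buffer=5):
        threading.Thread.__init__(self)
        self.id = id
        self.daemon = True
        self.buffer_time = buffer
        self.q = queue.Queue()                  # stand-in for TaskQueue
        self.last_action_time = 0
        self.new_task = threading.Event()
        self.task_done = threading.Event()

    def run(self):
        while True:
            self.new_task.wait()                # block until work is announced
            self.new_task.clear()
            while not self.q.empty():
                task = self.q.get()             # assumption: tasks are callables
                # throttle: keep at least buffer_time seconds between actions
                delay = self.buffer_time - (time.time() - self.last_action_time)
                if delay > 0:
                    time.sleep(delay)
                task()
                self.last_action_time = time.time()
                self.q.task_done()
            self.task_done.set()                # tell the producer the queue drained

    def submit(self, task):
        self.q.put(task)
        self.new_task.set()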
Example #5
def ask_task():
    # A node requests a task; JSON is the medium for passing the task parameters
    # Contains: task id, task resource download URL, task result upload URL, task target node
    from TaskQueue import TaskQueue
    print("node is asking for task...")
    test = TaskQueue()
    # Reassign test here: get() returns the dequeued task object
    test = test.get()
    _response = jsonify(test.to_json_dict())
    return _response
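
For reference, the comments above describe the fields carried in the response. A hypothetical example of what test.to_json_dict() might produce, with the key names assumed from the setters used in init_queue() rather than taken from the real TaskQueue:

# Hypothetical payload shape; the keys are assumptions based on
# set_download_url / set_upload_url / set_target_node in the earlier example.
example_payload = {
    "id": 1,
    "download_url": "http://localhost:5000/download_sourcefile/REDEME.md",
    "upload_url": None,
    "target_node": None,
}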
Example #6
    def get_compabitible_task_for_agent(self, agent, queue):
        compatible_task_list = TaskQueue()
        for task in self.tasks.get_queue():
            if task.is_agent_compatible(agent):
                compatible_task_list.enqueue(task)

        if compatible_task_list.is_empty():
            print("No compatible tasks found for agent %s" % agent.name)
            return None

        return compatible_task_list
Example #7
    def __init__(self, p=0.75, p1=0.8, p2=0.5, iteration_count=100000):
        self.current_tick = 0
        self.handled_tasks = []
        self.states = []
        self.iteration_count = iteration_count
        self.busy_count = 0

        self.source = Source(p, LemerGenerator(209715120, 3, 7))
        self.queue = TaskQueue(2)
        self.handlers = [
            Handler(p1, LemerGenerator(209715120, 3, 7)),
            Handler(p2, LemerGenerator(209715120, 3, 7))
        ]
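
The repeated LemerGenerator(209715120, 3, 7) calls suggest a Lehmer-style multiplicative congruential generator. Below is a minimal sketch of that recurrence, assuming the arguments are (modulus, multiplier, seed); both the argument order and the class itself are assumptions, not the project's actual implementation.

class LehmerSketch:
    """Hypothetical stand-in for LemerGenerator: x_{n+1} = (a * x_n) mod m."""

    def __init__(self, m, a, seed):
        self.m = m      # modulus
        self.a = a      # multiplier
        self.x = seed   # current state (must be non-zero)

    def next(self):
        # Advance the state and return a pseudo-random value in [0, 1).
        self.x = (self.a * self.x) % self.m
        return self.x / self.m

Each Handler above presumably draws such a value per tick and compares it with its probability p; that reading is also an assumption.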
Example #8
    def __init__(self, p1=0.4, p2=0.5, iteration_count=100000):

        self.iteration_count = iteration_count
        self.task_in_system_count = 0
        self.current_tick = 0
        self.handled_count = 0
        self.refused_count = 0
        self.states = []
        self.p1 = p1
        self.source = Source()
        self.queue = TaskQueue(2)
        self.handlers = [
            Handler(p1, LemerGenerator(209715120, 3, 7)),
            Handler(p2, LemerGenerator(209715120, 3, 7))
        ]
Example #9
    def __init__(self, name, pose=None, size=(1.0, 1.0, 1.0),
                 capabilities=None, movement_speed=0.1):
        # Holds the x, y, z coordinates of the agent
        self.pose = pose if pose is not None else Pose()  # rospy Pose data structure, used in Unity to determine space
        self.name = name  # Name of this particular agent, needs to be unique
        self.size = size  # The x, y, z size of the agent
        self.task_queue = TaskQueue()
        self.capabilities = capabilities if capabilities is not None else []
        self.status = ""
        self.current_action = "idle"
        self.movement_speed = movement_speed
        self.is_at_task = False
        self.ros_controller = ROSController()
Example #10
#!/usr/bin/python3
from ActionDatabase import ActionDatabase
import time
import subprocess as sp
import os
from TaskQueue import TaskQueue

print("Starting Friday...")
action_db = ActionDatabase("./actions")
print("Action DB ready")

print("Starting API server")
api = sp.Popen(["python3", "api.py"], cwd='.')
print("API ready")


def execute(task):
    print('Executing command ', task['command'])
    process = action_db.findAndExec(task['command'], str(task['id']))
    if not process:
        return False
    else:
        return True


queue = TaskQueue()
queue.watch(execute)

while True:
    time.sleep(1)  # keep the process alive without busy-waiting
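
TaskQueue.watch(execute) itself is not shown here, so the following is only a rough, hypothetical illustration of a callback-driven watch: poll a pending list on a background thread and hand each task to the supplied function. The class name, the pending list, and the interval parameter are all invented.

import threading
import time


class PollingQueueSketch:
    """Hypothetical illustration of a watch(callback) API; not the real TaskQueue."""

    def __init__(self):
        self.pending = []              # stand-in for wherever tasks really come from

    def put(self, task):
        self.pending.append(task)

    def watch(self, callback, interval=1.0):
        def loop():
            while True:
                while self.pending:
                    task = self.pending.pop(0)
                    callback(task)     # e.g. execute({'id': 1, 'command': 'hello'})
                time.sleep(interval)

        threading.Thread(target=loop, daemon=True).start()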
Example #11
def getSingleFileLinks(text):
    links = []

    doLog("getSingleFileLinks start")
    dp = xbmcgui.DialogProgress()
    dp.create("Searching for single file links", "")

    doLog("getSingleFileLinks finding matches")
    link_matches = re.findall(
        r'(http://hotfile\.com/[^"]+/([^"]+)\.(flv|mpg|mpeg|mov|avi|mkv)\.html)',
        text, re.M | re.S)

    if not link_matches:
        doLog("getSingleFileLinks no links found")
        dp.update(100, "None found")
        del dp
        return links

    dp.update(0, "Removing duplicate links")
    link_matches[:] = dedupeList(link_matches, 0)

    num_fetch_threads = 4
    url_queue = TaskQueue()

    def validateUrlsThread(i, q):
        """This is the worker thread function.
        It processes items in the queue one after
        another.  These daemon threads go into an
        infinite loop, and only exit when
        the main thread ends.
        """
        while True:
            url = q.get()
            valid_link = getHFLink(hotfile_user, hotfile_pass, url)
            if valid_link:
                links.append(valid_link)
            else:
                doLog("getSingleFileLinks NOT ADDING: " + url)
            q.task_done()

    # Set up some threads to validate the urls
    for i in range(num_fetch_threads):
        worker = Thread(target=validateUrlsThread, args=(
            i,
            url_queue,
        ))
        worker.daemon = True
        worker.start()

    if link_matches:
        doLog("getSingleFileLinks iterate over link matches")
        for link, name, extension in link_matches:
            parsed_url = urlparse(link)
            dp.update(0, "Validating link:", parsed_url[1],
                      parsed_url[2].split("/")[-1])
            doLog(
                "getSingleFileLinks appending:\nLINK: %s\nNAME: %s\nEXT: %s" %
                (link, name, extension))
            url_queue.put(link)
        # wait for queue to empty which means all urls validated
        url_queue.join()

    doLog("getSingleFileLinks done")
    dp.update(100, "Done getting single file links!")
    del dp
    return links
Example #12
def main():
    test = TaskQueue()
    with app.app_context():
        init_queue()
Example #13
    def run(self):

        while isRunning:

            if not self.robot.task_queue.isVideoQueueEmpty():
                url = self.robot.task_queue.getFromVideoQueue()
                self.robot.getVideoInfo(url)
            else:
                print('Waiting...')
                time.sleep(random.randrange(5, 10))


if __name__ == '__main__':
    v_bloom = BloomFilter(500000)
    a_bloom = BloomFilter(10000)
    t_bloom = BloomFilter(30000)
    task_queue = TaskQueue()
    robot = Robot(v_bloom, a_bloom, t_bloom, task_queue)
    currentVideos = VideoDao.find_all_video_urls()
    currentAuthors = AuthorDao.find_all_author_urls()
    currentTags = TagDao.find_all_tag_urls()

    robot.initFilter(currentVideos, currentAuthors, currentTags)

    p = Producer(robot=robot)
    p.start()

    time.sleep(10)
    c = Consumer(robot=robot)
    c.start()