def test_dict_obj_transformation(task_obj):
    """Round-trip a Task through to_dict/from_dict and check key fields survive."""
    as_dict = Task.to_dict(task_obj)
    assert as_dict['_id'] == "1"
    assert len(as_dict['subreddits']) == 3
    round_tripped = Task.from_dict(as_dict)
    assert round_tripped.id == "1"
def test_simple_task():
    """Validate Task can run a simple task"""
    def _gen():
        for value in (3, 4):
            yield value

    task = Task(_gen())
    assert task.run() == 3
    assert task.run() == 4
def taskCreate(self, number):
    """Prompt for a title and body, build a Task authored by the current
    profile, tag it with `number`, and return its string representation."""
    print("Task name:")
    title = self.cursor()
    print("Content:")
    body = self.cursor()
    author = " ".join((self.profile.name, self.profile.surname))
    new_task = Task(author, title, body)
    new_task.setNumber(number)
    return str(new_task)
def loadTasks(self, content):
    """Parse serialized task lines into Task objects.

    Each line has the form "id|author|title|content[|date[|status]]".
    Lines with fewer than four fields are skipped.

    :param content: iterable of pipe-delimited task strings
    :return: list of Task objects
    """
    tasks = []
    for line in content:
        # Strip whitespace from every field.  The original code called
        # str.strip() in a loop but discarded the result, so fields kept
        # stray whitespace; this was clearly the intent.
        fields = [field.strip() for field in line.split("|")]
        if len(fields) > 3:
            # `task_id` instead of `id` to avoid shadowing the builtin.
            task_id, author, title, text = fields[0], fields[1], fields[2], fields[3]
            task = Task(author, title, text)
            task.setNumber(task_id)
            if len(fields) > 4:
                task.setDate(fields[4])
            if len(fields) > 5:
                task.setStatus(fields[5])
            tasks.append(task)
    # (dropped the original's `counter` local, which was never read)
    return tasks
async def test_task_success():
    """Test Task class with success result."""
    message = MockIncommingMessage(success_message_body)
    task = Task(message)
    assert task.text == message.body.decode('UTF-8')
    assert task.json == success_message_body
    worker = task.delegate()
    assert isinstance(worker, workers.get('Minecraft: Java Edition'))
    payload = task.json
    assert payload['build']['id'] == worker.build_id
    assert payload['game'] == worker.game
    assert payload['version'] == worker.version
def main(argv):
    """Parse command-line arguments and run the grid-search task.

    :param argv: list of argument strings (e.g. ``sys.argv[1:]``)
    """
    # Create the parser
    parser = argparse.ArgumentParser(description='Run the task of grid search')
    # Add the arguments
    parser.add_argument('task', type=str, help=' - name of the task')
    parser.add_argument('file', type=str, help=' - file name of the data file')
    # Parse ONCE and honour the `argv` parameter.  The original called
    # parse_args() twice and silently ignored `argv`, always re-reading
    # sys.argv instead.
    args = parser.parse_args(argv)
    task = Task(args.task, args.file)
    task.run_models()
def add_task(self):
    """Create a task on this server"""
    # Guard clause: refuse once the per-server task limit is reached.
    if len(self.task_list) >= self.umax:
        raise Exception('Maximum number of tasks({}) exceeded.'.format(
            self.umax))
    self.task_list.append(Task(self.ttask))
def get_tasks(self, check_list_id: int) -> Tuple[int, str, List[Task]]:
    """
    Fetch every task row belonging to one checklist.

    :param check_list_id: numeric id of the checklist
    :return: (error flag, message, list of Task objects)
    """
    if not self._cursor:
        return 1, "There is no connection.", []
    try:
        sql_statement = """
            SELECT * FROM Tasks WHERE checkListID == ?
        """
        self._cursor.execute(sql_statement, (check_list_id, ))
        rows = self._cursor.fetchall()
        found = [
            Task(
                tid=row[0],
                due_date=row[1],
                description=row[2],
                done=row[3],
                check_list_id=check_list_id,
            )
            for row in rows
        ]
        return 0, "", found
    except sqlite3.Error as e:
        print(e)
        return 1, "Database error: {0}".format(e), []
def johnson3(tasks):
    """Reduce a 3-machine flow-shop instance to 2 virtual machines and
    solve it with johnson2 (machine 1+2 and machine 2+3 times are summed)."""
    virtual_tasks = [
        Task(i, [t.times[0] + t.times[1], t.times[1] + t.times[2]])
        for i, t in enumerate(tasks)
    ]
    return johnson2(virtual_tasks)
def generate_tasks(self, tasks_amount, durations, randomize_durations):
    """Populate ``self.tasks`` with `tasks_amount` Task objects.

    :param tasks_amount: number of tasks to create
    :param durations: (low, high) range when randomizing, otherwise a
        list holding one duration per task
    :param randomize_durations: when True draw each duration from
        randrange(low, high); when False use `durations` verbatim
    :return: the freshly built ``self.tasks`` list
    """
    self.tasks = []
    if randomize_durations:
        # `low`/`high` instead of the original `min`/`max`, which
        # shadowed the builtins.
        low, high = durations
        for i in range(tasks_amount):
            self.tasks.append(
                Task(id=i, duration=randrange(low, high, 1), stopper=self.stopper))
    else:
        if tasks_amount != len(durations):
            logging.error("Tasks amount does not match durations list")
            exit(1)
        for i in range(tasks_amount):
            self.tasks.append(
                Task(id=i, duration=durations[i], stopper=self.stopper))
    # The original did `return tasks` (an undefined name -> NameError) on
    # the random path and returned nothing on the other; return the list
    # that was actually built, on both paths.
    return self.tasks
def test_from_dict(task_dict):
    """from_dict should rebuild a Task, including nested SubredditTask objects."""
    task = Task.from_dict(task_dict)
    assert task.id == "1"
    assert not task.title
    subs = task.subreddits
    assert len(subs) == 3
    first, second = subs[0], subs[1]
    assert isinstance(first, SubredditTask)
    assert first.name == "subreddit1"
    assert first.flair_id == "fake-flair-id"
    assert not second.flair_id
    assert not second.error
def main():
    """Schedule three sample tasks with DMS and print the resulting timeline."""
    tasks = [
        Task("T1", 10, 2, 8),
        Task("T2", 2, 1, 2),
        Task("T3", 20, 5, 18),
    ]
    scheduler = DMS()
    for slot in scheduler.schedule(tasks):
        print(slot.task.id + '\t' + str(slot.start_time) + '\t' + str(slot.end_time))
def generating(iter):
    """Benchmark bruteforce vs. Johnson vs. NEH on `iter` random flow-shop
    instances and print a comparison table of makespans and runtimes.

    Machine/task counts are read interactively (prompts are in Polish:
    "Ile maszyn?" = how many machines, "Ile zadan?" = how many tasks).
    """
    number_of_machines = int(input("Ile maszyn?: "))
    number_of_tasks = int(input("Ile zadan?: "))
    # Per-iteration results: makespans and wall-clock durations per algorithm.
    bruteforceSpan = []
    johnsonSpan = []
    nehSpan = []
    durationBruteforce = []
    durationJohnson = []
    durationNeh = []
    i = 0
    while i < iter:
        generatedTasks = []
        cnt_tasks = 0
        cnt_machines = 0
        # Build one random instance: each task gets a processing time in
        # [1, 10) per machine.
        for cnt_tasks in range(0, number_of_tasks):
            rows = []
            for cnt_machines in range(0, number_of_machines):
                rows.append(int(random.uniform(1, 10)))
            print("{}".format(rows))
            generatedTasks.append(Task(cnt_tasks, rows))
        # Each solver mutates its input, hence the deepcopy per call.
        # Each returns (ordering, elapsed time).
        bruteforceOrder, timeBruteforce = bruteforce(
            copy.deepcopy(generatedTasks), number_of_machines)
        johnsonOrder, timeJohnson = johnson(copy.deepcopy(generatedTasks),
                                           number_of_machines)
        nehOrder, timeNeh = neh(copy.deepcopy(generatedTasks),
                                number_of_machines)
        durationBruteforce.append(timeBruteforce)
        durationJohnson.append(timeJohnson)
        durationNeh.append(timeNeh)
        # Evaluate each ordering against the pristine instance.
        bruteforceMakespan = makespan(bruteforceOrder, generatedTasks,
                                      number_of_machines)
        johnsonMakespan = makespan(johnsonOrder, generatedTasks,
                                   number_of_machines)
        nehMakespan = makespan(nehOrder, generatedTasks, number_of_machines)
        bruteforceSpan.append(bruteforceMakespan)
        johnsonSpan.append(johnsonMakespan)
        nehSpan.append(nehMakespan)
        i += 1
    # Render one row per iteration.  Column headers are in Polish
    # ("Czas" = time, "l.p." = ordinal number).
    x = PrettyTable()
    print("")
    print("----------------------------------------------------------")
    x.field_names = [
        "l.p.", "Bruteforce makespan", "Johnson makespan", "Neh makespan",
        "Czas bruteforce [ms]", "Czas Johnson [ms]", "Czas Neh [ms]"
    ]
    k = 0
    for k in range(0, iter):
        x.add_row([
            k + 1, "{}".format(bruteforceSpan[k]), "{}".format(johnsonSpan[k]),
            "{}".format(nehSpan[k]), "{}".format(durationBruteforce[k]),
            "{}".format(durationJohnson[k]), "{}".format(durationNeh[k])
        ])
    print(x)
def task_obj_only_crosspost():
    """Fixture: a Task that only has a crosspost source (no link or reply)."""
    subreddits = [
        SubredditTask(name="subreddit1", flair_id="fake-flair-id",
                      processed=False),
        SubredditTask(name="subreddit2", processed=False),
        SubredditTask(name="subreddit3", processed=False),
    ]
    return Task(id="3",
                crosspost_source_link="https://reddit.com/fake-post",
                completed=False,
                subreddits=subreddits)
class TestTask(unittest.TestCase):
    """Exercise the Task ttask countdown behaviour."""

    def setUp(self):
        self.task = Task(2)

    def test_task_missing_ttask(self):
        """Test the start of missing_ttask var."""
        self.assertEqual(2, self.task.missing_ttask)

    def test_task_clock(self):
        """tests missing_ttask every clock()."""
        for expected in (1, 0):
            self.task.clock()
            self.assertEqual(expected, self.task.missing_ttask)

    def test_task_alive(self):
        self.task.clock()
        self.assertTrue(self.task.is_alive())
        self.task.clock()
        self.assertFalse(self.task.is_alive())
def _add_task(self, _) -> None:
    """
    Build a Task from the entry text (if any), clear the entry, and hand
    the task to the entry frame.

    :param _: Tk Event Object (unused)
    :return: None
    """
    description = self.get()
    if not description:
        return
    self._clean()
    # ToDo: List_name
    self._entry_frame.add_task(task=Task(description=description))
class TestTaskClass(unittest.TestCase):
    """ Test cases for the Task class """

    def setUp(self):
        self.a_task = Task()
        self.a_task.Skill_Pool.clear()
        self.a_task.add_skill("python")
        self.python_skill = Skill('python')

    def test_add_skill(self):
        first_skill = self.a_task.Skill_Pool[0]
        self.assertEqual(
            'python', first_skill.name,
            msg="There should be a skill named 'python' in tasks")

    def test_studied_invalid_input(self):
        result = self.a_task.studied('Javascript')
        self.assertEqual(
            'Add skill first', result,
            msg="Should first add a skill before marking it as studied")
def test_to_dict(task_obj):
    """to_dict should serialise the Task and its subreddits into plain dicts."""
    data = Task.to_dict(task_obj)
    assert data['_id'] == "1"
    assert data['link'] == "https://fake-link.com"
    assert not data['title']
    subs = data['subreddits']
    assert len(subs) == 3
    assert isinstance(subs[0], dict)
    assert subs[0]['name'] == "subreddit1"
    assert subs[0]['flair_id'] == "fake-flair-id"
    assert not subs[1]['processed']
    assert not subs[1]['error']
def build_tasks(parts) -> List[Task]:
    """Pair consecutive (header, body) entries of `parts` into Task objects.

    Entry 2k is a task header, entry 2k+1 its newline-separated body; the
    header is prepended to the split body before constructing the Task.
    """
    tasks = list()
    for head_idx in range(0, len(parts), 2):
        body_lines = parts[head_idx + 1].split("\n")
        body_lines.insert(0, parts[head_idx])
        tasks.append(Task(body_lines))
    return tasks
def add_task(self, task: Task) -> None:
    """
    Attach the selected checklist id to `task`, persist it via the task
    controller, and refresh the view (or show the error).

    :param task: Task object
    :return:
    """
    task.check_list_id = self._selected_cid
    error, message = self._task_controller.add_task(task=task)
    if not error:
        self._tasks_tk.refresh()
    else:
        messagebox.showerror(title="Error", message=message)
def addTask2bloc(time, req_mat, req, ms1, ms2, ms3, ms4, fov, gtw):
    # Build one Task per row of `time`, route each to the station whose
    # name appears inside the task name, then wire precedence links from
    # `req`.  Assumes `time`, `req_mat` and `req` are pandas DataFrames
    # indexed by task name -- TODO confirm against callers.
    tasks = []
    for index, row in time.iterrows():
        # Columns: 'Tps_total' = total time, 'Tps_QC' = QC time;
        # 'Max_livraion' (sic) is the earliest allowed start.
        t = Task(index, time.loc[index, 'Tps_total'], time.loc[index, 'Tps_QC'])
        t.min_start = req_mat.loc[index, 'Max_livraion']
        tasks.append(t)
        # Dispatch by substring match of the station name in the task name;
        # an unmatched task is a data error.
        if ms1.name in t.name:
            ms1.addTask(t)
        elif ms2.name in t.name:
            ms2.addTask(t)
        elif ms3.name in t.name:
            ms3.addTask(t)
        elif ms4.name in t.name:
            ms4.addTask(t)
        elif fov.name in t.name:
            fov.addTask(t)
        elif gtw.name in t.name:
            gtw.addTask(t)
        else:
            raise ValueError
    for index, row in req.iterrows():
        # Look up the task whose name equals this requirement row's index.
        # NOTE(review): the inner lambda parameter `t` shadows the outer
        # `t`; behaviour is correct but easy to misread.
        t = list(filter(lambda t: t.name == index, tasks))[0]
        if req.loc[index, 'tasks_req'] is not None:
            # Predecessors: every task whose name occurs in 'tasks_req'.
            t.previous = list(filter(lambda t: t.name in req.loc[index, 'tasks_req'], tasks))
            # Count of predecessors still pending before this task may start.
            t.pending_prec = len(t.previous)
        if req.loc[index, 'next'] is not None:
            # Successors: every task whose name occurs in 'next'.
            t.next = list(filter(lambda t: t.name in req.loc[index, 'next'], tasks))
def task_obj():
    """Fixture: a fully-populated Task with three subreddit entries."""
    subreddits = [
        SubredditTask(name="subreddit1", flair_id="fake-flair-id",
                      processed=False),
        SubredditTask(name="subreddit2", processed=False),
        SubredditTask(name="subreddit3", processed=False),
    ]
    return Task(id="1",
                link="https://fake-link.com",
                crosspost_source_link="https://reddit.com/fake-post",
                reply_content="sample reply",
                completed=False,
                subreddits=subreddits)
def task_obj_no_crosspost():
    """Fixture: a titled Task with a direct link and no crosspost source."""
    subreddits = [
        SubredditTask(name="subreddit1", flair_id="fake-flair-id",
                      processed=False),
        SubredditTask(name="subreddit2", processed=False),
        SubredditTask(name="subreddit3", processed=False),
    ]
    return Task(id="2",
                link="https://fake-link.com",
                reply_content="sample reply",
                completed=False,
                subreddits=subreddits,
                title="fake-title-provided")
def create_task(self, args):
    """Create a Task from parsed CLI args and register it with its
    executor's developer record and its project, if set.

    :param args: namespace providing name, priority, project, executor,
        status and sub_tasks attributes
    """
    # `x or None` is equivalent to the original `x if x else None`.
    task = Task(name=args.name,
                priority=args.priority,
                project=args.project or None,
                executor=args.executor or None,
                status=args.status,
                sub_tasks_uid=args.sub_tasks)
    self.tasks[task.uid] = task
    # Fixed: the log message previously lacked its closing parenthesis.
    self.logger.info(f'Task {task.uid} with name {task.name} was created '
                     f'(project - {task.project}, executor - {task.executor})')
    if task.executor:
        self.developers[task.executor].add_task(task)
    if task.project:
        self.projects[task.project].add_task(task)
def run():
    """Build an AND-gate problem, solve it as a quantum Task, and save the
    result as JSON."""
    # this is an and gate
    objective = {(0, 0): -0.0, (1, 1): -0.0, (2, 2): 6.0}
    constraints = {(0, 1): 2.0, (0, 2): -4.0, (1, 2): -4.0}
    task = Task()
    task.set_data_with_dicts(objective, constraints)
    # run the task on a quantum computer, then persist the result
    task.run_data()
    task.save_as_json()
def prepare_conll03(args):
    """Build pos/chunk/ner Task objects from one CoNLL-2003 file.

    All three task paths must point at the same file; chunk/ner fall back
    to the pos path when unset.
    """
    args.chunk = args.chunk or args.pos
    args.ner = args.ner or args.pos
    assert args.pos == args.chunk == args.ner
    pipe = Conll2003Pipe(chunk_encoding_type="bio",
                         ner_encoding_type="bioes",
                         lower=False)
    db = pipe.process_from_file(args.pos)
    # Each task gets its own deep copy of the three splits.
    task_lst = [
        Task(
            idx,
            task_name,
            deepcopy(db.get_dataset("train")),
            deepcopy(db.get_dataset("dev")),
            deepcopy(db.get_dataset("test")),
        )
        for idx, task_name in enumerate(["pos", "chunk", "ner"])
    ]
    return task_lst, db.vocabs
def __init__(self, id, per_tasks,
             latitude=None,
             longitude=None,
             real_dis_edge=None):
    """Create a road entity at a (possibly random) position with a queue
    of `per_tasks` tasks.

    :param id: entity identifier
    :param per_tasks: number of Task objects to pre-populate
    :param latitude: position; drawn uniformly from the road range when None
    :param longitude: position; drawn uniformly from the road range when None
    :param real_dis_edge: passed through to each Task
    """
    # BUGFIX: the original put random.randint(...) in the default argument
    # list, which Python evaluates ONCE at definition time -- every
    # instance using the defaults shared the same position.  Draw per
    # instance instead; explicit caller-supplied values behave as before.
    if latitude is None:
        latitude = random.randint(config.ROAD_START, config.ROAD_LENGTH)
    if longitude is None:
        longitude = random.randint(config.ROAD_START, config.ROAD_LENGTH)
    self.latitude = latitude
    self.longitude = longitude
    speed_val = 20
    # heading in radians; 3.14 approximates pi (kept from the original)
    speed_angel = random.uniform(0, 2 * 3.14)
    self.speed = (speed_val, speed_angel)
    self.id = id
    self.task_queue = list()
    for i in range(per_tasks):
        task = Task(self, i, None, real_dis_edge)
        self.task_queue.append(task)
    self.waiting_queue = list()
def prepare_ptb(args):
    """Load pos/chunk/ner datasets, normalise tag encodings, build shared
    word and per-task tag vocabularies, and wrap everything into Task
    objects.

    Returns (task_lst, vocabs) where vocabs maps each task name to its tag
    vocabulary plus "words" to the shared word vocabulary.
    """
    datas = {}
    # Column layouts: pos tags in column 1, chunk tags in column 2,
    # ner tags in column 3 of their respective files.
    datas["pos"] = (ConllLoader(headers=["words", "pos"],
                                indexes=[0, 1]).load(args.pos).datasets)
    chunk_data = (ConllLoader(headers=["words", "chunk"],
                              indexes=[0, 2]).load(args.chunk).datasets)
    # The chunk corpus has no dev split; carve 10% off train for dev.
    chunk_data['train'], chunk_data['dev'] = chunk_data['train'].split(0.1)
    datas['chunk'] = chunk_data
    datas["ner"] = (ConllLoader(headers=["words", "ner"],
                                indexes=[0, 3]).load(args.ner).datasets)
    # Normalise tag schemes: chunk to IOB2, ner to BIOES (via IOB2).
    for ds in datas['chunk'].values():
        ds.apply_field(lambda x: iob2(x), 'chunk', 'chunk')
    for ds in datas['ner'].values():
        ds.apply_field(lambda x: iob2bioes(iob2(x)), 'ner', 'ner')
    vocabs = {}
    src_vocab = Vocabulary()
    # First pass: drop -DOCSTART- sentences and collect vocabularies.
    for idx, task_name in enumerate(["pos", "chunk", "ner"]):
        data = datas[task_name]
        filter_docstart(data)
        # Tag vocabulary without padding/unknown entries.
        vocab = Vocabulary(padding=None, unknown=None)
        vocab.from_dataset(*list(data.values()), field_name=task_name)
        # The word vocabulary is shared across all three tasks.
        src_vocab.from_dataset(*list(data.values()), field_name="words")
        vocabs[task_name] = vocab
    task_lst = []
    # Second pass: index words/tags to ids and build the Task objects.
    for idx, task_name in enumerate(["pos", "chunk", "ner"]):
        data = datas[task_name]
        src_vocab.index_dataset(*list(data.values()),
                                field_name="words",
                                new_field_name="words")
        vocabs[task_name].index_dataset(*list(data.values()),
                                        field_name=task_name,
                                        new_field_name=task_name)
        for ds in data.values():
            ds.apply_field(len, 'words', 'seq_len')
        task_lst.append(
            Task(idx, task_name, data["train"], data["dev"], data["test"]))
    vocabs["words"] = src_vocab
    return task_lst, vocabs
def create_new_split_tile_tasks(self):
    """
    Creates new tasks for splitting tiles. Only creates as many split tile
    tasks as cores that have been specified in the config.
    :return: None
    """
    queued = 0
    for parent_tile in self._target_tiles:
        # Dict would be faster here
        if parent_tile not in self._unprocessed_tiles:
            continue
        if queued >= number_of_processing_threads:
            break
        task_args = [
            self._tile_connectivity[parent_tile],
            self._tile_connectivity
        ]
        self.task_queue.put(Task(task="split_ahn3_tile", arguments=task_args))
        self._unprocessed_tiles.remove(parent_tile)
        queued += 1
def _create_merge_task_for_tile(self, completed_tile_name: str,
                                interpolation_type: str):
    """
    Creates a merge rasters task using the supplied tile name and
    interpolation type. Relies on both these parameters because the
    completed_tiles variable contains both DTM and DSM data.

    :param completed_tile_name: String representing the name of the tile
        that has been completed (e.g. 36FN2)
    :param interpolation_type: String representing interpolation type
        (dsm or dtm)
    :return: None
    """
    if interpolation_type == "dtm":
        completed_tiles = self._completed_dtm
    else:
        completed_tiles = self._completed_dsm
    successfully_interpolated_tiles = []
    parent_tile = None
    # Collect tiles that actually produced a raster; the last such tile is
    # used as the representative parent.  (The original also built an
    # `indexes_to_remove` list that was never read -- the whole entry is
    # deleted below regardless -- so that dead code has been removed.)
    for tile in completed_tiles[completed_tile_name]:
        if tile.interpolated is True and tile.related_raster is not None:
            parent_tile = tile
            successfully_interpolated_tiles.append(tile)
    # The completed entry is consumed whether or not any tile qualified.
    if interpolation_type == "dtm":
        del self._completed_dtm[completed_tile_name]
    else:
        del self._completed_dsm[completed_tile_name]
    if parent_tile is not None:
        arguments = [
            parent_tile.get_parent_tile(), successfully_interpolated_tiles
        ]
        self.task_queue.put(Task(task="merge_rasters", arguments=arguments))