def test_redo() -> None:
    """Redo restores the task that a preceding undo removed."""
    tm = TaskManager([])
    tm.add(Task("sop"))
    tm.undo()
    assert not tm.tasks()
    tm.redo()
    assert tm.tasks() == [Task("sop")]
def openApplication(debugMode=False):
    """Create the main Qt application and run its event loop.

    Parameters
    ----------
    debugMode : bool
        When True, swap in a QmlInstantEngine that watches the QML
        sources on disk (live reload).
    """
    # Locate the QML sources relative to this module.
    sourceDir = os.path.dirname(__file__)
    qmlDir = os.path.join(sourceDir, "qml")
    mainQmlFile = os.path.join(qmlDir, "main.qml")

    # Force the Material style for Quick Controls.
    os.environ["QT_QUICK_CONTROLS_STYLE"] = "Material"

    # Route Qt warnings & errors through our logger.
    logger = Logger()
    QtCore.qInstallMessageHandler(logger.messageHandler)

    # Fresh Qt application + QML engine.
    app = QGuiApplication(sys.argv)
    engine = QQmlApplicationEngine()

    if debugMode:
        # Replace the standard engine with the live-reloading variant.
        print("DEBUG mode")
        engine = qmlinstantengine.QmlInstantEngine()
        engine.addFilesFromDirectory(qmlDir, recursive=True)

    # Make the custom QML modules importable.
    engine.addImportPath(os.path.join(qmlDir, "modules"))

    # Expose Python-side objects to QML.
    taskManager = TaskManager("main task manager")
    rootContext = engine.rootContext()
    rootContext.setContextProperty("_taskManager", taskManager)
    rootContext.setContextProperty("_logger", logger)
    rootContext.setContextProperty("_debug", debugMode)

    # Load the main QML file and hand control to Qt.
    engine.load(mainQmlFile)
    engine.quit.connect(app.quit)
    sys.exit(app.exec_())
def main():
    """Parse CLI arguments, build the configured tasks, and run them."""
    args = parser()

    # Configure and get the tasks.
    configuredTasks = taskReturner(args.task, config=args.config)()

    # Hand them to the manager and run.
    runner = TaskManager(args.check_output)
    runner.tasks = configuredTasks
    runner.runTasks()
def __init__(self, node, config_desc, keys_auth, client, use_ipv6=False,
             use_docker_machine_manager=True):
    """Initialise the task server and its collaborators.

    Args:
        node: network node descriptor for this peer.
        config_desc: configuration descriptor supplying node name, minimal
            price, timeouts and resource-management flags.
        keys_auth: key/authentication helper passed to the TaskManager.
        client: owning client; supplies the environments manager and datadir.
        use_ipv6: when True the underlying TCPNetwork is created for IPv6.
        use_docker_machine_manager: forwarded to the TaskComputer.
    """
    self.client = client
    self.keys_auth = keys_auth
    self.config_desc = config_desc
    self.node = node
    # Keeper of task headers, filtered by the configured minimal price.
    self.task_keeper = TaskHeaderKeeper(client.environments_manager,
                                        min_price=config_desc.min_price)
    self.task_manager = TaskManager(
        config_desc.node_name,
        self.node,
        self.keys_auth,
        root_path=TaskServer.__get_task_manager_root(client.datadir),
        use_distributed_resources=config_desc.use_distributed_resource_management,
        tasks_dir=os.path.join(client.datadir, 'tasks'))
    # The computer gets a back-reference to this server.
    self.task_computer = TaskComputer(
        config_desc.node_name,
        task_server=self,
        use_docker_machine_manager=use_docker_machine_manager)
    self.task_connections_helper = TaskConnectionsHelper()
    self.task_connections_helper.task_server = self
    # Active task sessions; incoming ones are held weakly.
    self.task_sessions = {}
    self.task_sessions_incoming = WeakList()
    # Trust bounds used elsewhere in the server.
    self.max_trust = 1.0
    self.min_trust = 0.0
    # Recent-message bookkeeping, bounded by the session timeout.
    self.last_messages = []
    self.last_message_time_threshold = config_desc.task_session_timeout
    # Results / failures queued for later delivery.
    self.results_to_send = {}
    self.failures_to_send = {}
    self.use_ipv6 = use_ipv6
    self.forwarded_session_request_timeout = config_desc.waiting_for_task_session_timeout
    self.forwarded_session_requests = {}
    self.response_list = {}
    # Deny set loaded from the data directory (nodes to refuse).
    self.deny_set = get_deny_set(datadir=client.datadir)
    network = TCPNetwork(
        ProtocolFactory(MidAndFilesProtocol, self, SessionFactory(TaskSession)),
        use_ipv6)
    PendingConnectionsServer.__init__(self, config_desc, network)
def main():
    """Build tasks from a YAML configuration (or a previous run) and run them.

    Reads the general config, overlays the task-specific (or absolute-path)
    config, writes the merged config into the task directory, then hands the
    resulting tasks to the TaskManager.
    """
    args = parser()
    tasks = []
    taskDir = ""

    ##Read general config and update with given config
    if args.config or args.abs_config:
        confDir = "{}/ChargedAnalysis/Workflow/config/".format(
            os.environ["CHDIR"])

        # FIX: open config files via `with` so handles are closed
        # deterministically (the original leaked every handle).
        with open("{}/general.yaml".format(confDir), "r") as generalFile:
            config = yaml.load(generalFile, Loader=yaml.Loader)

        if args.config:
            with open("{}/{}.yaml".format(confDir, args.config), "r") as taskFile:
                config.update(yaml.load(taskFile, Loader=yaml.Loader))
        else:
            with open(args.abs_config, "r") as absFile:
                config.update(yaml.load(absFile, Loader=yaml.Loader))

        # Task directory is keyed by the first component of config["dir"].
        taskDir = "{}/Results/{}/{}".format(os.environ["CHDIR"], args.task,
                                            config["dir"].split("/")[0])
        os.makedirs(taskDir, exist_ok=True)

        # Persist the merged configuration next to the results.
        with open("{}/config.yaml".format(taskDir), "w") as conf:
            yaml.dump(config, conf, default_flow_style=False, indent=4)

        config["dir"] = "{}/Results/{}/{}".format(os.environ["CHDIR"],
                                                  args.task, config["dir"])
        config["era"] = args.era

        ##Configure and get the tasks
        tasks = taskReturner(args.task, config)()

    elif args.run_again:
        # Re-running: the flow file's parent directory is the task directory.
        taskDir = "/".join(args.run_again.split("/")[:-1])

    ##Run the manager
    manager = TaskManager(tasks=tasks, existingFlow=args.run_again,
                          dir=taskDir, longCondor=args.long_condor,
                          globalMode=args.global_mode, nCores=args.n_cores)
    manager.run(args.dry_run)
def main():
    """Interactive task-helper loop: list, add, finish, and review tasks."""
    # Creates taskmanager instance and opens file for output.
    tm = TaskManager()

    # Start program: imports task list from file & displays it.
    print("Hi! I'm Herupa, your task helper.")
    tm.displayTasks()
    print()

    # If we have no tasks, create a list.
    if not tm.hasTasks():
        tm.makeTaskList()
        tm.displayTasks()

    # While we have tasks, let the user enter a task, finish a task,
    # or view completed tasks.
    while tm.hasTasks():
        print()
        print("E [task] to enter new task.")
        print("F [task] to finish a task.")
        print("L to see a list of completed tasks.")
        print("Q to quit.")
        response = input()

        # FIX: pressing Enter on an empty line used to raise IndexError
        # on response[0]; treat it like any other invalid command.
        if not response:
            print("Not a valid command.")
            continue

        if response[0] == "F" or response[0] == "f":
            cur_task = response[2:]
            tm.finishTask(cur_task)
            tm.displayTasks()
        elif response[0] == "e" or response[0] == "E":
            # FIX: the second comparison was a duplicate lowercase "e",
            # so the uppercase "E [task]" command advertised in the menu
            # fell through to "Not a valid command."
            cur_task = response[2:]
            tm.newTask(cur_task)
            tm.displayTasks()
        elif response[0] == "q" or response[0] == "Q":
            break
        elif response[0] == 'l' or response[0] == "L":
            tm.displayCompletedTasks()
        else:
            print("Not a valid command.")

    print()
    tm.displayCompletedTasks()
    tm.closeFile()
def load_from_file(self, path: Path) -> None:
    """Load tasks serialized at *path* and refresh the window/view."""
    self._serializer = self._serializer_type(path)
    loaded_tasks = self._serializer.load()
    self._task_manager = TaskManager(loaded_tasks)
    self._view.setWindowTitle(path.name)
    self.request_update()
def __init__(self):
    """Create the collaborators this object delegates to."""
    # Independent helpers; all take no constructor arguments.
    self.task_manager = TaskManager()
    self.issue_manager = IssueManager()
    self.url_handler = UrlHandler()
def test_is_redoable_after_undo() -> None:
    """Only after an undo does the manager report a pending redo."""
    tm = TaskManager([])
    tm.add(Task())
    assert not tm.is_redoable()
    tm.undo()
    assert tm.is_redoable()
def test_set_importance() -> None:
    """set_importance marks the matching task with the given importance."""
    tm = TaskManager([Task("fio")])
    tm.set_importance(Task("fio"), Importance.Important)
    assert tm.tasks() == [Task("fio", importance=Importance.Important)]
def test_delete_with_invalid_task_is_noop() -> None:
    """Deleting a task that is not present leaves the list untouched."""
    tm = TaskManager([Task("fiog")])
    tm.delete(Task("fop"))
    assert tm.tasks() == [Task("fiog")]
import db
import requests
import json
from datetime import datetime

from taskmanager import TaskManager

# Module-level manager shared by all appeal registrations.
taskmanager = TaskManager()


def register_new_appeal_json(json):
    """Register a new appeal from a decoded JSON payload.

    NOTE(review): the parameter name shadows the stdlib ``json`` module;
    it is kept unchanged for backward compatibility with callers.
    """
    subject_id = json["subject_id"]
    text_of_appeal = json["text"]
    start_date = datetime.now()
    _register_new_appeal(subject_id=subject_id, text=text_of_appeal,
                         date=start_date)


def get_appeal_information_json(appeal_id):
    """Return a JSON-serializable dict describing the appeal."""
    appeal = db.get_appeal_by_id(appeal_id)
    d = {"appeal_id": appeal[0]}
    # FIX: the dict was built but never returned, so callers always got None.
    return d


def _register_new_appeal(subject_id, text, date):
    """Persist a new appeal, kick off its initial analysis, and return it."""
    new_task = db.add_new_appeal(subject_id, text, date)
    taskmanager.start_initial_analyze(text, new_task)
    if new_task is not None:  # FIX: was `!= None`
        print(new_task)
    return new_task
def test_add() -> None:
    """A new task is appended after the existing ones."""
    tm = TaskManager([Task("cvje")])
    tm.add(Task("fjdp"))
    assert tm.tasks() == [Task("cvje"), Task("fjdp")]
def test_snooze() -> None:
    """Snoozing sets the snooze date; snoozing with None clears it."""
    tm = TaskManager([Task("fgjnio")])
    tm.snooze(Task("fgjnio"), date(4, 8, 5))
    assert tm.tasks() == [Task("fgjnio", snooze=date(4, 8, 5))]
    tm.snooze(Task("fgjnio", snooze=date(4, 8, 5)), None)
    assert tm.tasks() == [Task("fgjnio", snooze=None)]
def test_schedule_task() -> None:
    """Scheduling sets the due date; scheduling with None clears it."""
    tm = TaskManager([Task("g5")])
    tm.schedule_task(Task("g5"), date(2, 5, 7))
    assert tm.tasks() == [Task("g5", due=date(2, 5, 7))]
    tm.schedule_task(Task("g5", due=date(2, 5, 7)), None)
    assert tm.tasks() == [Task("g5", due=None)]
def test_set_complete_on_invalid_task_is_noop() -> None:
    """Completing a task that is not present leaves the list untouched."""
    tm = TaskManager([Task("gig")])
    tm.set_complete(Task("dsoap"))
    assert tm.tasks() == [Task("gig")]
def test_set_complete() -> None:
    """Completion stamps today's date; passing False clears it again."""
    tm = TaskManager([Task("fj9")])
    tm.set_complete(Task("fj9"))
    assert tm.tasks() == [Task("fj9", completed=date.today())]
    tm.set_complete(Task("fj9", completed=date.today()), False)
    assert tm.tasks() == [Task("fj9", completed=None)]
def test_replace() -> None:
    """replace swaps one task for another, preserving list order."""
    tm = TaskManager([Task("fnk"), Task("fkop")])
    tm.replace(Task("fkop"), Task("f0ß"))
    assert tm.tasks() == [Task("fnk"), Task("f0ß")]
def synchronizer(tables, clear):
    """Resynchronize ClickHouse consumers with the Arango producer.

    Stops producer/consumers, clears cached state, recreates Kafka topics,
    reloads existing collection data, then restarts everything.

    Args:
        tables: iterable of table/collection names to resynchronize.
        clear: when True, flush the whole redis cache DB instead of only
            deleting the per-table keys.

    Returns:
        bool: True on full success, False as soon as any step fails.
    """
    config, logging = get_basic_utilities().get_utils((CONFIG, LOGGER))
    redis_config = config['redis']
    redis_helper = get_singleton_redis_client(redis_config['host'],
                                              redis_config['port'],
                                              redis_config['db'])
    try:
        generate_config_file()
    except Exception as e:
        # FIX: was `except (FileNotFoundError, Exception)` — FileNotFoundError
        # is already a subclass of Exception, so the tuple was redundant.
        logging.error(f'unable to generate pm2 config file: {e}', exc_info=True)
        return False

    pm2_config_path = get_config_path()
    producer_process = PM2('arango-producer', pm2_config_path)
    consumer_process = PM2('clickhouse-consumer', pm2_config_path)
    task_manager = TaskManager(redis_helper)

    # clear redis cache db if specified
    if clear:
        redis_helper.client.flushdb()
        logging.info('redis cache cleared')
    else:
        # delete consumer specific keys
        for table in tables:
            for key in redis_helper.client.keys(f'{table}*'):
                redis_helper.client.delete(key)

    # stop the producer process
    if not producer_process.stop():
        logging.error('unable to stop producer')
        return False

    # stop the consumer process
    for table in tables:
        consumer_active = task_manager.ping(table)
        if consumer_active:
            result = task_manager.stop_task(table)
            if result == Status.INACTIVE.name:
                logging.info(f'stopped the consumer {table}')
            else:
                logging.error(f'unable to stop consumer {table}')
                return False
        else:
            logging.info(f'consumer {table} not active')

    # delete topics
    all_deleted = delete_topics(tables)
    if not all_deleted:
        logging.error(f'unable to delete all kafka topics')
        return False

    # create topic
    for table in tables:
        created = create_topic(table)
        if not created:
            # FIX: failure message said "unable to delete topic" for a
            # failed *create* — corrected to match the operation.
            logging.error(f'unable to create topic: {table}')
            return False

    # start producer process
    if not producer_process.start():
        logging.error('unable to start producer')
        return False

    # sync existing collection data
    for table in tables:
        is_data_loaded = load_collection_data(collection=table,
                                              store_tick=True,
                                              batch_size=100000)
        if is_data_loaded:
            logging.info('existing data loaded to clickhouse')
        else:
            logging.error(f'failed to load {table} data')
            return False

        # start the consumer
        if task_manager.ping(table):
            result = task_manager.start_task(table)
            if result == Status.ACTIVE.name:
                logging.info(f'{table} consumer process started')
            else:
                logging.error('unable to start consumer, restarting using pm2')
                if consumer_process.restart():
                    logging.info('pm2 consumer restarted')
                else:
                    logging.error('unable to restart pm2 consumers')
                    return False

    return True
from task import Task
from taskmanager import TaskManager

# Sample tasks covering a range of priorities (only t and t2 are added
# below; the rest are available for further experiments).
t = Task(1, "TODO: Learn something", priority=3)
t2 = Task(2, "TODO: Check bugs on a program", priority=1)
t3 = Task(3, "TODO: Check for priority algorithm", priority=7)
t4 = Task(4, "TODO: Talk to GIrlfriend", priority=3)
t5 = Task(5, "TODO: Check covid-19 status", priority=2)
t6 = Task(6, "TODO: Check algorithms updates", priority=10)
t7 = Task(7, "TODO: Check covid-19 status", priority=2)
t8 = Task(8, "TODO: Check algorithms updates", priority=10)

# Exercise the manager with two tasks and show its state.
task_manager = TaskManager()
task_manager.addTask(t)
task_manager.addTask(t2)
print(task_manager)

"""Removing task"""
def test_remove_snooze() -> None:
    """remove_snooze clears a previously set snooze date."""
    tm = TaskManager([Task("j54", snooze=date(5, 8, 1))])
    tm.remove_snooze(Task("j54", snooze=date(5, 8, 1)))
    assert tm.tasks() == [Task("j54")]
def test_rename() -> None:
    """Renaming replaces the task's title in place."""
    tm = TaskManager([Task("gfio")])
    tm.rename(Task("gfio"), "ggrgr")
    assert tm.tasks() == [Task("ggrgr")]
def test_delete() -> None:
    """Deleting removes exactly the matching task."""
    tm = TaskManager([Task("fmnk"), Task("jdoi")])
    tm.delete(Task("fmnk"))
    assert tm.tasks() == [Task("jdoi")]
def main():
    """Command-line entry point: dispatch add/update/delete/list commands."""
    # top-level parser
    parser = argparse.ArgumentParser(
        epilog='Multi-word values'
        ' must be encased in quotes, like "that".')
    subparsers = parser.add_subparsers()

    # subparser for the "add" command
    create_parser = subparsers.add_parser('add', help='create new task')
    create_parser.set_defaults(action='add')
    create_parser.add_argument(
        '--name', type=valid_name, required=True,
        help='name of the task (required, max. 20 characters)', metavar='')
    create_parser.add_argument(
        '--deadline', type=valid_date,
        help='task\'s deadline in ISO format (yyyy-mm-dd)', metavar='')
    create_parser.add_argument('--description', type=str,
                               help='description of the task', metavar='')

    # subparser for the "update" command
    update_parser = subparsers.add_parser('update', help='update task')
    update_parser.set_defaults(action='update')
    update_parser.add_argument('TASK_HASH', help='Task identifier')
    update_parser.add_argument('--name', type=str, help='name of the task',
                               metavar='')
    update_parser.add_argument(
        '--deadline', type=valid_date,
        help='task\'s deadline in ISO format (yyyy-mm-dd)', metavar='')
    update_parser.add_argument('--description', type=str,
                               help='description of the task', metavar='')

    # subparser for the "delete" command
    delete_parser = subparsers.add_parser('delete', help='delete task')
    delete_parser.set_defaults(action='delete')
    delete_parser.add_argument('TASK_HASH', type=str, help='Task identifier')

    # subparser for the "list" command
    list_parser = subparsers.add_parser('list', help='list tasks')
    list_parser.set_defaults(action='list')
    list_group = list_parser.add_mutually_exclusive_group(required=True)
    list_group.add_argument('--all', action='store_true')
    list_group.add_argument('--today', action='store_true')

    args = parser.parse_args()

    # FIX: invoking the program with no subcommand left `args` without an
    # `action` attribute and the dispatch below raised AttributeError;
    # report proper usage instead (parser.error exits with status 2).
    if getattr(args, 'action', None) is None:
        parser.error('a command is required (add, update, delete or list)')

    task_manager = TaskManager()
    if args.action == "add":
        task_manager.add_task(args.name, args.deadline, args.description)
    elif args.action == "update":
        task_manager.update_task(args.TASK_HASH, args.name, args.deadline,
                                 args.description)
    elif args.action == "delete":
        task_manager.delete_task(args.TASK_HASH)
    elif args.action == "list":
        if args.all:
            task_manager.list_tasks('all')
        else:
            task_manager.list_tasks('today')
def test_remove_due() -> None:
    """remove_due clears a previously set due date."""
    tm = TaskManager([Task("bfg", due=date(5, 8, 1))])
    tm.remove_due(Task("bfg", due=date(5, 8, 1)))
    assert tm.tasks() == [Task("bfg")]