def add_task(self, queue, data):
    queue = self._control_queue_name(queue)
    queue = self.config.PREFIX + ':' + queue
    task = Task(data, queue)
    self.get_redis_connection().rpush(task.get_queue_name(), task.toJson())
    return True
def test_func_can_recv_data(self):
    def func(t):
        return t.queue.get()

    t = Task(func)
    t.send("ok")
    t.run()
    self.assertEqual(t.result, "ok")
def dequeue(self):
    """
    Returns a :class:`~retask.task.Task` object from the queue.
    Returns ``None`` if the queue is empty.

    :return: :class:`~retask.task.Task` object from the queue

    If the queue is not connected then it will raise
    :class:`retask.ConnectionError`

    .. doctest::

       >>> from retask.queue import Queue
       >>> q = Queue('test')
       >>> q.connect()
       True
       >>> t = q.dequeue()
       >>> print t.data
       {u'name': u'kushal'}

    """
    if not self.connected:
        raise ConnectionError('Queue is not connected')

    if self.rdb.llen(self._name) == 0:
        return None

    data = self.rdb.rpop(self._name)
    task = Task()
    task.__dict__ = json.loads(data)
    return task
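# A minimal round-trip sketch for the dequeue() above, assuming the public
# retask API (Queue.connect/enqueue/dequeue) and a Redis server reachable on
# localhost; the queue name 'test' and the payload are illustrative only.
from retask.queue import Queue
from retask.task import Task

q = Queue('test')
q.connect()                          # True when Redis is reachable
q.enqueue(Task({'name': 'kushal'}))
task = q.dequeue()                   # None when the queue is empty
if task is not None:
    print(task.data)                 # {u'name': u'kushal'}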
class App(object):
    def __init__(self):
        self.db = configs.create_engine()
        self.page_manager = db.PageManager(db=self.db)
        self.references_manager = db.ReferencesManager(db=self.db)
        self.worker_count = configs.WORKER_COUNT
        self.downloader = Downloader()
        self.parser = Parser()
        self.task_queue = QueueTask(
            page_manager=self.page_manager,
            size=4 * self.worker_count)
        self.task = Task(
            page_manager=self.page_manager,
            references_manager=self.references_manager,
            task_queue=self.task_queue,
            worker_count=self.worker_count,
            parser=self.parser,
            downloader=self.downloader
        )

    def run(self):
        signal.signal(
            signal.SIGINT,
            (lambda signum, frame: self.task_queue.stop()))
        self.task.run()
def install(self):
    print "Installing " + self.name + "..."
    Task.install_packages(self)

    # Open template and substitute variables
    f = open(self.nginx_template_url, "r")
    template = f.read()
    f.close()
    template = re.sub("{{domain_name}}", self.domain_name, template)
    template = re.sub("{{base_dir}}", self.base_dir, template)
    f = open(self.base_dir + "/" + self.domain_name + "/conf/nginx.conf", "w")
    f.write(template)
    f.close()

    # Link files to nginx
    result = call(
        "ln -s " + self.base_dir + "/" + self.domain_name
        + "/conf/nginx.conf /etc/nginx/sites-available/" + self.domain_name,
        shell=True,
    )
    result = call(
        "ln -s /etc/nginx/sites-available/" + self.domain_name
        + " /etc/nginx/sites-enabled/" + self.domain_name,
        shell=True,
    )

    # Restart Nginx
    result = call("/etc/init.d/nginx restart", shell=True)
    return True
def __init__(self, config, is_append=False):
    name = 'append_file_stat_task' if is_append else 'generate_file_stat_task'
    Task.__init__(self, name)
    self._config = config
    self._is_append = is_append
    self._stat_cache = {}
    self._cache_path = os.path.join(self._config['build_cache_dir'],
                                    'stat_cache.json')
def menu(arguments):
    if arguments['list']:
        if arguments['--recursive']:
            task_lineage(arguments)
        else:
            task_list(arguments)
    else:
        try:
            if arguments['create']:
                Task.create(**_task_create_kwargs(arguments))
            elif arguments['read']:
                task_read(arguments)
            elif arguments['set']:
                task_set(arguments)
            elif arguments['delete']:
                Task(arguments['<name>']).delete()
            elif arguments['reset']:
                task_reset(arguments)
        except NameError as exc:
            print(PACKAGE_NAME, 'Error!', exc)
            sys.exit(1)
def __init__(self, foot=core.Kick.RIGHT, desiredDistance=2000.0):
    Task.__init__(self)
    self.kickRunning = False
    self.postKick = False
    self.foot = foot
    self.desiredDistance = desiredDistance
    self.sm = SimpleStateMachine(['startup', 'kicking', 'finish'])
def __init__(self, message, hosts):
    Task.__init__(self)
    self.__message = message
    self.__hosts = hosts
    with database.transaction() as t:
        t.execute("SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'")
        (self.__batch_time,) = t.fetchone()
def __init__(self, url, path=None, overwrite=False, **kwargs):
    Task.__init__(self)
    self.url = url
    self.path = path
    self.overwrite = overwrite
    self.kwargs = kwargs
    self.size = 0
def ffmpeg(arg):
    torun = Task(createfrom=arg)
    # Get the output file extension:
    destExtension = torun.outfile.split(".")[-1]
    print torun.outfile.split(".")
    tmpfile = "tmp." + destExtension
    codeccheck = subprocess.Popen(["ffmpeg", "-codecs"],
                                  stdout=subprocess.PIPE)
    allcodecs = codeccheck.communicate()[0]
    if allcodecs.find("libfdk_aac") == -1:
        print "Install libfdk_aac for better audio quality!"
        if torun.forcefdk == True:
            print "Server has disabled workers without libfdk_aac!"
            sys.exit()
        else:
            if "libfdk_aac" in torun.arguments:
                libfdk_pos = torun.arguments.index("libfdk_aac")
                torun.arguments[libfdk_pos] = "aac"
                torun.arguments.append("-strict")
                torun.arguments.append("-2")
    out = subprocess.call(["ffmpeg", "-i", torun.infile]
                          + torun.arguments + [tmpfile])
    if not out == 0:
        print "FFMPEG FAILED"
    else:
        shutil.move(tmpfile, torun.outfile)
        os.remove(torun.infile)
    try:
        os.remove(tmpfile)
    except:
        pass
def __init__(self, conf):
    Task.__init__(self, conf)
    self.name = 'Django Install'
    self.addRequiredFields(['django_url', 'base_dir', 'domain_name'])
    self.packages = ['cron', 'python-mysqldb']
    self.checkRequiredConf(conf)
def create_valid_task():
    unit_id = "3c82ef61-0875-41d3-99ba-101672d79d6b"
    task_id = uuid.uuid4().hex
    valid_json_dict = {
        "title": "Blank",
        "task_id": task_id,
        "chronicle": [
            {
                "time_ms": 0.3,
                "unit_id": unit_id,
                "hop": 1
            }
        ],
        "from_unit": unit_id,
        "to_unit": "a789ada1-edb5-4cfe-b1f9-8584abbf8a2f",
        "command": {"announce": {}},
        "response": {}
    }
    valid_command = Task(unit_id, **valid_json_dict)
    valid_response = Task(unit_id, **valid_json_dict)
    valid_response.add_response(response={"response": "yes"})
    return valid_command, valid_response
def test_func_called(self):
    func = Mock()
    func.return_value = "ok"
    t = Task(func, "hello", hello="world")
    t.run()
    func.assert_called_once_with(t)
    self.assertEqual(t.result, "ok")
def read_tasks(cls, infile):
    tasks = []
    default_tz = pytz.utc
    for event, elem in etree.iterparse(infile, events=("start", "end")):
        if event == "start":
            if elem.tag == "defaults":
                in_defaults = True
        else:
            # Stop reading as soon as the </tasks> tag is encountered.
            if elem.tag == "tasks":
                break
            elif elem.tag == "defaults":
                in_defaults = False
            # Otherwise, parse each <task> element in accordance to the way
            # it was output.
            elif elem.tag == "task":
                attrs = elem.attrib
                # XXX When project of a task becomes something more than
                # just a string, the code will probably break on the
                # following line.
                if "project" in attrs:
                    project = attrs["project"]
                else:
                    project = ""
                task = Task(name=elem.text, project=project,
                            id=int(attrs["id"]))
                if "done" in attrs:
                    task.done = bool(int(attrs["done"]))
                if "time" in attrs:
                    task.time = cls._timedelta_fromrepr(attrs["time"])
                if "deadline" in attrs:
                    task.deadline = cls._read_time(attrs, "deadline",
                                                   default_tz=default_tz)
                tasks.append(task)
            elif elem.tag == "timezone" and in_defaults:
                default_tz = pytz.timezone(elem.text)
    return tasks
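# A sketch of the XML shape read_tasks() above appears to parse; the tag and
# attribute names come from the parser, but the values and the document
# itself are invented for illustration. The 'time' and 'deadline' attributes
# are omitted because their exact formats depend on _timedelta_fromrepr and
# _read_time.
SAMPLE_TASKS_XML = """\
<tasks>
  <defaults>
    <timezone>Europe/Paris</timezone>
  </defaults>
  <task id="1" project="home" done="1">water the plants</task>
  <task id="2">write the report</task>
</tasks>
"""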
def __init__(self, robot, link, target, **kwargs):
    if hasattr(target, 'robot_link'):  # used for ROS communications
        target.robot_link = link.index  # dirty
    elif type(target) is list:
        target = array(target)

    def _pos_residual(target_pose):
        residual = target_pose - link.pose
        if dot(residual[0:4], residual[0:4]) > 1.:
            return _oppose_quat * target_pose - link.pose
        return residual

    if hasattr(target, 'pose'):
        def pos_residual():
            return _pos_residual(target.pose)
    elif type(target) is ndarray:
        def pos_residual():
            return _pos_residual(target)
    else:  # link frame target should be a pose
        raise Exception("Target %s has no 'pose' attribute" % type(target))

    def jacobian():
        return robot.compute_link_pose_jacobian(link)

    self.link = link
    Task.__init__(self, jacobian, pos_residual=pos_residual, **kwargs)
def __init__(self, conf):
    Task.__init__(self, conf)
    self.name = 'MySql'
    self.packages = ['mysql-server', 'mysql-client']
    self.addRequiredFields(['mysql_username', 'mysql_database_name',
                            'mysql_password'])
    Task.checkRequiredConf(self, conf)
def __init__(self, conf):
    Task.__init__(self, conf)
    self.name = "Nginx"
    self.addRequiredFields(["base_dir", "domain_name", "nginx_template_url"])
    self.packages = ["nginx"]
    self.checkRequiredConf(conf)
def wait(self, wait_time=0):
    """
    Returns a :class:`~retask.task.Task` object from the queue.
    Returns ``False`` if it times out.

    :arg wait_time: Time in seconds to wait, default is infinite.

    :return: :class:`~retask.task.Task` object from the queue
        or ``False`` if it times out.

    .. doctest::

       >>> from retask.queue import Queue
       >>> q = Queue('test')
       >>> q.connect()
       True
       >>> task = q.wait()
       >>> print task.data
       {u'name': u'kushal'}

    .. note::

       This is a blocking call; you can specify a wait_time argument
       for a timeout.

    """
    if not self.connected:
        raise ConnectionError('Queue is not connected')

    data = self.rdb.brpop(self._name, wait_time)
    if data:
        task = Task()
        task.__dict__ = json.loads(data[1])
        return task
    else:
        return False
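# A small sketch contrasting wait() with dequeue(), assuming the same retask
# Queue as in the earlier example; the 5-second timeout is arbitrary. Unlike
# dequeue(), wait() blocks on Redis BRPOP instead of polling.
from retask.queue import Queue

q = Queue('test')
q.connect()
task = q.wait(wait_time=5)   # blocks for up to 5 seconds
if task is False:
    print('timed out, queue stayed empty')
else:
    print(task.data)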
def get(self, project, queue, task):
    """ Get the named task in a queue.

    Args:
        project: A string containing an application ID.
        queue: A string containing a queue name.
        task: A string containing a task ID.
    """
    task = Task({'id': task, 'queueName': queue})

    requested_fields = self.get_argument('fields', None)
    if requested_fields is None:
        fields = TASK_FIELDS
    else:
        fields = parse_fields(requested_fields)

    omit_payload = False
    if 'payloadBase64' not in fields:
        omit_payload = True

    queue = self.queue_handler.get_queue(project, queue)
    if queue is None:
        write_error(self, HTTPCodes.NOT_FOUND, 'Queue not found.')
        return

    task = queue.get_task(task, omit_payload=omit_payload)
    self.write(json.dumps(task.json_safe_dict(fields=fields)))
def create_task(self, **kwargs):
    """Create a :class:`Task`."""
    t = Task(watcher=[self], **kwargs)
    t.owner = self
    db.session.add(t)
    db.session.commit()
    return t
def ffmpeg(arg):
    torun = Task(createfrom=arg)
    codeccheck = subprocess.Popen(["ffmpeg", "-codecs"],
                                  stdout=subprocess.PIPE)
    allcodecs = codeccheck.communicate()[0]
    if allcodecs.find("libfdk_aac") == -1:
        print "Install libfdk_aac for better audio quality!"
        if torun.forcefdk == True:
            print "Server has disabled workers without libfdk_aac!"
            sys.exit()
        else:
            # lists have no find() method; index() is the list equivalent
            libfdk_pos = torun.arguments.index("libfdk_aac")
            torun.arguments[libfdk_pos] = "aac"
            torun.arguments.append("-strict")
            torun.arguments.append("-2")
    out = subprocess.call(["ffmpeg", "-i", torun.infile]
                          + torun.arguments + ["tmp.mp4"])
    if not out == 0:
        print "FFMPEG FAILED"
    else:
        os.remove(torun.infile)
        shutil.move("tmp.mp4", torun.outfile)
    try:
        os.remove("tmp.mp4")
    except:
        pass
def add_function(subparsers, module, funcname):
    func = getattr(module, funcname)
    if getattr(func, 'ignore', False) or not inspect.isfunction(func):
        return
    depends = getattr(func, "depends", None)
    task = Task(funcname, func, depends)
    rules[funcname] = task
    subparser = subparsers.add_parser(funcname, help=func.__doc__)
    args, varargs, keywords, defaults = inspect.getargspec(func)
    defaults = defaults or []
    args = args or []
    n_args = len(args) - len(defaults)
    for arg in args[:n_args]:
        subparser.add_argument(arg)
        task.args.append(arg)
    if varargs:
        subparser.add_argument(varargs, nargs="*")
        task.varargs = varargs
    for arg, default in zip(args[n_args:], defaults):
        task.args.append(arg)
        task.defaults[arg] = default
        name = ('-' if len(arg) == 1 else '--') + arg
        if isinstance(default, bool):
            action = "store_" + str(not default).lower()
            subparser.add_argument(name, default=default, action=action)
        elif isinstance(default, int):
            subparser.add_argument(name, default=default, action="store",
                                   type=int)
        else:
            subparser.add_argument(name, default=default, action="store")
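# inspect.getargspec(), used above, was removed in Python 3.11. A rough
# standalone sketch of the same positional/flag split using
# inspect.signature; build_parser_for() and greet() are invented for
# illustration and are not part of the Task/rules machinery above.
import argparse
import inspect


def build_parser_for(func):
    parser = argparse.ArgumentParser(description=func.__doc__)
    for name, param in inspect.signature(func).parameters.items():
        if param.default is inspect.Parameter.empty:
            # arguments without defaults become required positionals
            parser.add_argument(name)
        elif isinstance(param.default, bool):
            # booleans become on/off flags, mirroring the
            # store_true/store_false trick above
            action = 'store_false' if param.default else 'store_true'
            parser.add_argument('--' + name, default=param.default,
                                action=action)
        else:
            parser.add_argument('--' + name, default=param.default,
                                type=type(param.default))
    return parser


def greet(name, times=1, shout=False):
    """Print a greeting."""


print(build_parser_for(greet).parse_args(['world', '--times', '3']))
# Namespace(name='world', times=3, shout=False)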
def calculate(self, loadcases, input_list):
    """
    For each item in input_list, create a Task instance and run it.
    Return the ids of the created Tasks.
    """
    if not isinstance(loadcases, list):
        loadcases = [loadcases]

    # if some loadcases need filetransfer, run the ftp server
    # for loadcase in loadcases:
    #     if loadcase.need_filetransfer:
    #         run_fileserver()
    #         break

    result_ids = []
    if isinstance(input_list, dict):
        input_list = _dict_to_list(input_list)

    # if even one loadcase needs local work, run all loadcases on the
    # client computer
    # TODO think about the case when the client couldn't run a particular
    # loadcase
    is_local_loadcases = False
    for lc in loadcases:
        is_local_loadcases |= lc.is_local

    for item in input_list:
        task = Task(loadcases, item)
        if not self._is_local_work and not is_local_loadcases:
            async_result = remote_run.delay(task)
            task.id = async_result.task_id
            result_ids.append(task.id)
        else:
            self._task_counter += 1
            result_ids.append(self._task_counter)
            # TODO error handling
            self._id_to_task[self._task_counter] = local_run(task)
    return result_ids
def aggregate(self, ns_density=None):
    """
    Aggregate cluster-level tasks into bay-level tasks.

    :param ns_density: non-simultaneity density, type ``float``,
        range ``[0, 1]``
    :return: an aggregated clone of this instance
    """
    if ns_density is None:
        ns_density = self.parameter.ns_density
    v = self.clone()
    v.parameter.p_density = 0.0
    v.parameter.ns_density = ns_density
    v.precedence = []
    v.non_simultaneity = []

    # create aggregated bay-level tasks,
    # remove all the cluster-level tasks,
    # insert bay-level tasks into bays
    v.tasks = []
    for b in v.bays:
        t = Task()
        t.processing_time = b.aggregate_task_processing_time
        t.index = b.index
        b.empty()
        b.append(t)
        v.tasks.append(t)

    # generate non-simultaneity set
    v.generate_non_simultaneity(ns_density)
    return v
def __init__(self, project, name):
    Task.__init__(self)
    Persistent.__init__(self, project.persistent, name)
    self.name = name
    self.fhs = FHS.shared(project)
    project.option_collector.option_decls.add(self.__class__)
    self.check_missing = project.options.get('check-missing', 'yes') != 'no'
def start_epc(self):
    # launch HSS, wait for prompt
    log("INFO: ALU test: run HSS")
    self.task_hss = Task("actions/alu_hss.bash", "alu_hss",
                         self.epc_machine, self.oai_user, self.oai_password,
                         self.env,
                         self.logdir + "/alu_hss." + self.epc_machine,
                         event=self.event)
    self.task_hss.waitlog('S6AS_SIM-> ')

    # then launch EPC, wait for connection on HSS side
    log("INFO: ALU test: run EPC")
    task_epc = Task("actions/alu_epc.bash", "ALU EPC",
                    self.epc_machine, self.oai_user, self.oai_password,
                    self.env,
                    self.logdir + "/alu_epc." + self.epc_machine)
    ret = task_epc.wait()
    if ret != 0:
        log("ERROR: EPC start failure")
        raise TestFailed()
    self.task_hss.waitlog('Connected\n')
def spawnImmediateTask(self, jobid, taskid_complex=None, params=None):
    '''
    Run a task immediately using the basic configuration of the job.

    @param jobid: int
    @param taskid_complex: string
    @param params: string
    '''
    job = Job.getByJobID(jobid)
    if not job:
        raise ValueError("Job(id:%d) does not exist or is not enabled."
                         % (jobid))
    if params:
        job.set_command(job._substituteReservedWord(params))
    if taskid_complex:
        task = Task(job.get_jobid(), uuid.uuid1().hex, datetime.now(),
                    job.getCommandToExcute(), job.get_retry(), job,
                    job.get_depend(), taskid_complex)
    else:
        task = Task(job.get_jobid(), uuid.uuid1().hex, datetime.now(),
                    job.getCommandToExcute(), job.get_retry(), job,
                    job.get_depend())
    t = TaskRunner(0, task, self._status, self, params)
    t.daemon = True
    task.save()
    self._status.add_waiting_task(task, t)
    t.start()
    return task
def start_enb_rru_rcc(self, rru_config_file, rcc_config_file):
    # copy wanted configuration files
    quickshell("sshpass -p " + self.oai_password
               + " scp config/" + rru_config_file + " "
               + self.oai_user + "@" + self.enb_rru_machine
               + ":/tmp/enb.conf")
    quickshell("sshpass -p " + self.oai_password
               + " scp config/" + rcc_config_file + " "
               + self.oai_user + "@" + self.enb_rcc_machine
               + ":/tmp/enb.conf")

    # run RRU/RCC softmodem
    log("INFO: ALU test: run RRU softmodem with configuration file "
        + rru_config_file)
    self.task_rru_enb = Task("actions/run_enb.bash", "run_RRU_softmodem",
                             self.enb_rru_machine, self.oai_user,
                             self.oai_password, self.env,
                             self.logdir + "/run_softmodem."
                             + self.enb_rru_machine,
                             event=self.event)
    self.task_rru_enb.waitlog('[RRH] binding to')

    log("INFO: ALU test: run RCC softmodem with configuration file "
        + rcc_config_file)
    self.task_rcc_enb = Task("actions/run_enb.bash", "run_RCC_softmodem",
                             self.enb_rcc_machine, self.oai_user,
                             self.oai_password, self.env,
                             self.logdir + "/run_softmodem."
                             + self.enb_rcc_machine,
                             event=self.event)
    self.task_rcc_enb.waitlog('[BBU] local ip addr')

    # wait for RRU and RCC to be connected
    self.task_rru_enb.waitlog('devices ok (eNB_thread_asynch_rx)')
def compile_enb(self, build_arguments, log_suffix=""):
    log("INFO: ALU test: compile softmodem on " + self.enb_machine)
    envcomp = list(self.env)
    envcomp.append('BUILD_ARGUMENTS="' + build_arguments + '"')
    # we don't care about BUILD_OUTPUT but it is required (TODO: change that)
    envcomp.append('BUILD_OUTPUT=/')
    logdir = self.logdir + "/compile_log"
    remote_files = "'/tmp/oai_test_setup/oai/cmake_targets/log/*'"
    post_action = "mkdir -p " + logdir + \
                  " && sshpass -p " + self.oai_password + \
                  " scp -r " + self.oai_user + \
                  "@" + self.enb_machine + ":" + remote_files + " " + logdir + \
                  " || true"
    task = Task("actions/compilation.bash", "compile_softmodem",
                self.enb_machine, self.oai_user, self.oai_password,
                envcomp,
                self.logdir + "/compile_softmodem." + log_suffix
                + self.enb_machine,
                post_action=post_action)
    ret = task.wait()
    if ret != 0:
        log("ERROR: softmodem compilation failure")
        raise TestFailed()
    task.postaction()
action_repeat = 3
buffer_size = 1000000
batch_size = 64
gamma = 0.99
tau = 0.001
actor_dropout = 0.25
critic_dropout = 0.25
exploration_theta = 0.2
exploration_sigma = 0.3
actor_lr = 0.001
critic_lr = 0.001

task = Task(action_repeat=action_repeat,
            init_pose=init_pose,
            init_velocities=init_velocities,
            init_angle_velocities=init_angle_velocities,
            runtime=runtime,
            target_pos=target_pos)

agent = DDPG(task=task,
             buffer_size=buffer_size,
             batch_size=batch_size,
             gamma=gamma,
             tau=tau,
             actor_dropout=actor_dropout,
             critic_dropout=critic_dropout,
             exploration_theta=exploration_theta,
             exploration_sigma=exploration_sigma,
             actor_lr=actor_lr,
             critic_lr=critic_lr)

percentage = 10
def randomInitLayerBased(self, id, noOfTasks, alpha, processorDag, ccr):
    self.id = id
    self.alpha = alpha

    # randomly generate the number of layers of the graph
    minLayers = self.__class__.MIN_LAYERS
    maxLayers = round(math.sqrt(noOfTasks) / alpha * 2 - minLayers)
    self.height = int(round(random.uniform(minLayers, maxLayers)))

    # determine the min and max number of task nodes per layer
    minNodesPerLayer = self.__class__.MIN_NODES_PER_LAYER
    maxNodesPerLayer = round(
        math.sqrt(noOfTasks) * alpha * 2 - minNodesPerLayer)

    # initialize a list of empty tasks (with 2 dummy tasks: entry and exit)
    for i in range(0, noOfTasks + 2):
        self.tasks.append(Task(i, 0, 0, 0, 0))

    # add the 'dummy' entry task into the 0th 'dummy' layer
    self.layers.append([self.tasks[0]])

    # a counter to keep track of the current task node
    currentNodeId = 1

    # loop through the graph height to assign task nodes,
    # but leave the last layer for 'special' assignments
    for i in range(1, self.height):
        # randomly generate the number of nodes for each graph layer
        layerWidth = int(
            round(random.uniform(minNodesPerLayer, maxNodesPerLayer)))

        newLayer = []
        for j in range(0, layerWidth):
            newLayer.append(self.tasks[currentNodeId])
            # generate random constraint values for tasks
            self.tasks[currentNodeId].generateRandomValues()
            # record the layer the task belongs to
            self.tasks[currentNodeId].layerId = i
            currentNodeId += 1
            if currentNodeId == noOfTasks:
                break

        # add the new layer into the graph
        self.layers.append(newLayer)

        # stop when the total number of tasks reaches the pre-defined number
        if currentNodeId == noOfTasks:
            break

    # add all the remaining tasks to the 'last' layer
    newLayer = []
    for i in range(currentNodeId, noOfTasks + 1):
        newLayer.append(self.tasks[currentNodeId])
        self.tasks[currentNodeId].generateRandomValues()
        self.tasks[currentNodeId].layerId = self.height
        currentNodeId += 1
    self.layers.append(newLayer)

    # add a 'dummy' layer for the 'dummy' exit task
    self.layers.append([self.tasks[noOfTasks + 1]])
    self.tasks[noOfTasks + 1].layerId = self.height + 1

    # ccr
    self.ccr = ccr
    self.processorDag = processorDag

    # generate edges for the graph
    for i in range(1, len(self.layers) - 2):
        possibleDestinationNodes = []
        # collect all nodes in the higher layers as potential destination
        # nodes
        for j in range(i + 1, len(self.layers) - 1):
            possibleDestinationNodes.extend(self.layers[j])

        # loop through all nodes of the current layer
        for j in range(0, len(self.layers[i])):
            # randomly generate the number of out-links for each node
            outDegree = random.randint(self.__class__.MIN_OUT_DEGREE,
                                       len(possibleDestinationNodes))
            # choose a random subset of the potential destination nodes
            destinationNodes = random.sample(possibleDestinationNodes,
                                             outDegree)
            # establish links between the current node and all nodes in the
            # selected subset
            for k in range(0, len(destinationNodes)):
                self.layers[i][j].addEdgeRandomConstraint(
                    destinationNodes[k], self.ccr, self.processorDag)

    # loop through all the task nodes
    for i in range(1, noOfTasks + 1):
        # link nodes with no predecessors to the dummy entry task node
        if len(self.tasks[i].predecessors) == 0:
            self.tasks[0].addEdge(self.tasks[i], 0)
        # link nodes with no successors to the dummy exit task node
        if len(self.tasks[i].successors) == 0:
            self.tasks[i].addEdge(self.tasks[noOfTasks + 1], 0)

    # update the graph's height with the 'real' number of layers
    self.height = len(self.layers)
def test_create_result_list(self):
    res = Task.select().where(Task.employee_name == self.test_ename)
    self.assertEqual(task_search.create_result_list(res), 1)
def __init__(self, config, project_info):
    Task.__init__(self, 'build_base_resource_task')
    self.__init_attributes()
def randomTasks(a, b):
    return [Task(distrib) for _ in range(random.randint(a, b))]
def addTask(self, name: str, date):
    id = self.findUniqueId()
    task = Task(name=name, due_date=date)
    self.tasks.append(task)
    Loader.addTask(task=task)
if not (await user.handle_login_status()):
    sys.exit(-1)

loop.run_until_complete(asyncio.wait([login_all(users)]))
danmu_connection = connect.connect(
    dict_user['other_control']['default_monitor_roomid'])
list_raffle_connection = [connect.RaffleConnect(i) for i in range(1, 5)]
list_raffle_connection_task = [i.run() for i in list_raffle_connection]
yjconnection = connect.YjConnection(
    dict_user['other_control']['raffle_minitor_roomid'])
var_super_user = SuperUser(users[0])
raffle = RaffleHandler(users, var_super_user, loop, True)
normal_task = Task(users, var_super_user, loop)
state_task = StateTask(users, var_super_user, loop)
var_console = bili_console.Biliconsole(users, var_super_user, loop)

console_thread = threading.Thread(target=var_console.cmdloop)
console_thread.start()

normal_task.init()

tasks = [
    raffle.join_raffle(),
    danmu_connection.run(),
    state_task.run_workstate(),
    state_task.run_timestate(),
    yjconnection.run()
]
def main(): print("\n==== Fish Tank Records App ====\n") # leave closes the app when True leave = False while not leave: # lists used for choices valid_binary_choices = [1, 2] task_list = ['clean', 'feed', 'clean and feed'] valid_user_choices = [1, 2, 3] # loop until user chooses valid choice for date date_choice = None while date_choice not in valid_binary_choices: print("Add a task from today or a previous day?") print("Enter '1' for today") print("Enter '2' for previous day") date_choice = int(input()) # set the date variable based on user choice valid = False # date is set to today if date_choice == 1: date_in = date.today() # date is set to custom date else: while not valid: print("When did you do the task?") print("Enter in this form: YYYY-MM-DD") date_in = input() # check validity of date format year, month, day = date_in.split("-") # check for correct length of parts if len(year) == 4 and len(month) == 2 and len(day) == 2: # check for hyphens as separators if date_in[4] == '-' and date_in[7] == '-': # check that values are ints try: yr = int(year) mn = int(month) dy = int(day) # check month and day for valid values if 0 < mn < 13 and 0 < dy < 32: # sets valid to true if date is in correct format valid = True else: print("\nInvalid Format!\n") # catches ValueError from casting values to int except ValueError: print("\nInvalid Format!\n") else: print("\nInvalid Format!\n") user_input = None # loops until user chooses valid choice for task while user_input not in valid_user_choices: print("\nWhat did you do?") print("Enter '1' for clean") print("Enter '2' for feed") print("Enter '3' for clean and feed") user_input = int(input()) task_in = task_list[user_input - 1] # creates task obj of data task_obj = Task(date_in, task_in) # add new line to csv file with new task item with open('fishTankRecord.csv', 'a', newline='') as file_name: file_writer = csv.writer(file_name) file_writer.writerow([task_obj.date, task_obj.work]) file_name.close() # asks user if more input is needed user_input = None while user_input not in valid_binary_choices: print("Do you want to enter another task?") print("Enter '1' for Yes") print("Enter '2' for No") user_input = int(input()) # sets leave to true and closes app if no more input is needed if user_input == 2: leave = True
def test_sat_solve():
    op1 = Operator('op1', set(), {'a'}, set())
    op2 = Operator('op2', set('a'), set('b'), set())
    op3 = Operator('op3', set(), {'a', 'b', 'c'}, set())
    op4 = Operator('op4', {'b'}, {'c'}, set())
    op5 = Operator('op5', {'b', 'c'}, {'d'}, set())
    op6 = Operator('op6', {'d'}, {'e', 'f'}, set())
    op7 = Operator('op7', {'a', 'c', 'f'}, {'g'}, set())

    task0 = Task('task0', {'a'}, {'a'}, {'a'}, [op1, op2])
    task1 = Task('task1', {'a'}, set(), {'a'}, [op1, op2])
    task2 = Task('task2', {'a', 'b'}, set(), {'b'}, [op1, op2])
    task3 = Task('task3', {'a', 'b', 'c'}, set(), {'c'}, [op1, op2])
    task4 = Task('task4', {'a', 'b', 'c'}, set(), {'c'}, [op1, op2, op3])
    task5 = Task('task5', {'a', 'b', 'c'}, set(), {'c'}, [op1, op2, op4])
    task6 = Task('task6', {'a', 'b', 'c', 'd'}, {'a'}, {'d'},
                 [op2, op4, op5])
    task7 = Task('task7c', {'a', 'b', 'c', 'd'}, {'a'}, {'d'}, [op3, op5])
    task8 = Task('task8', {'a', 'b', 'c', 'd', 'e', 'f', 'g'}, {'a'}, {'g'},
                 [op2, op3, op4, op5, op6, op7])

    op_a = Operator('op_a', set(), {'a'}, set())
    op_b = Operator('op_b', {'a'}, {'b'}, set())
    op_c = Operator('op_c', {'b'}, {'c'}, set())
    op_d = Operator('op_d', {'c'}, {'d'}, set())
    op_e = Operator('op_e', {'d'}, {'e'}, set())
    op_f = Operator('op_f', {'e'}, {'f'}, set())
    task_d = Task('task_a', {'a', 'b', 'c', 'd'}, set(), {'d'},
                  [op_a, op_b, op_c, op_d])
    task_e = Task('task_b', {'a', 'b', 'c', 'd', 'e'}, set(), {'e'},
                  [op_a, op_b, op_c, op_d, op_f])

    op_facts = Operator(
        'op_facts', set(),
        {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
         'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w'}, set())
    task_facts = Task(
        'task_facts',
        {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
         'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w'},
        set(), {'v', 'w'}, [op_facts])

    op_delete_pre = Operator('delete_pre', {'a'}, {'b'}, {'a'})
    task_op_delete_pre = Task('op_delete_pre', {'a', 'b'}, {'a'}, {'b'},
                              [op_delete_pre])

    # Miconic: prob00.pddl (2 floors, 1 person):
    # <Op (depart f1 p0), PRE: frozenset({'(lift-at f1)', '(boarded p0)'}),
    #  ADD: frozenset({'(served p0)'}), DEL: frozenset({'(boarded p0)'})>,
    # <Op (board f0 p0), PRE: frozenset({'(lift-at f0)'}),
    #  ADD: frozenset({'(boarded p0)'}), DEL: frozenset()>,
    # <Op (up f0 f1), PRE: frozenset({'(lift-at f0)'}),
    #  ADD: frozenset({'(lift-at f1)'}), DEL: frozenset({'(lift-at f0)'})>]
    op_depart = Operator('depart', {'high', 'boarded'}, {'served'},
                         {'boarded'})
    op_board = Operator('board', {'low'}, {'boarded'}, set())
    op_up = Operator('up', {'low'}, {'high'}, {'low'})
    task_simple_miconic = Task('miconic-simple',
                               {'low', 'high', 'boarded', 'served'},
                               {'low'}, {'served'},
                               [op_depart, op_board, op_up])

    expected = [(task0, []), (task1, [op1]), (task2, [op1, op2]),
                (task3, None), (task4, [op3]), (task5, [op1, op2, op4]),
                (task6, [op2, op4, op5]), (task7, [op3, op5]),
                (task_facts, [op_facts]),
                (task_op_delete_pre, [op_delete_pre]),
                (task_simple_miconic, [op_board, op_up, op_depart])]

    for task, plan in expected:
        yield check_plan, task, plan
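# Yield-style tests like test_sat_solve() above were removed in pytest 4; a
# sketch of the equivalent shape using pytest.mark.parametrize. EXPECTED is
# a hypothetical module-level stand-in for the (task, plan) pairs built
# above, and check_plan is the same checker the original yields.
import pytest

EXPECTED = []  # would hold the (task, plan) pairs from test_sat_solve()


@pytest.mark.parametrize("task,plan", EXPECTED)
def test_sat_solve_parametrized(task, plan):
    check_plan(task, plan)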
import py.test

from search import sat
from search import minisat
from task import Operator, Task
import tools

fact1 = "at-station"
op1 = Operator('op1', set(), {'a'}, set())
op2 = Operator('op2', set(), set(), {'c'})
op3 = Operator('op3', ['d'], ['a'], [])
op4 = Operator('op4', [], ['b'], [])

task1 = Task('task1', {'a'}, set(), {'a'}, [op1])
task2 = Task('task2', {'a', 'd'}, {'d'}, {'a'}, [op1, op3])
task3 = Task('task3', {'a', 'b'}, set(), {'a', 'b'}, [op1, op4])
task4 = Task('task4', {'a', 'd'}, {'d'}, {'a'}, [op3])
task5 = Task('trivial', {'a'}, {'a'}, {'a'}, [])

aux_a_iff_b = [['a<->b', 'a', 'b'], ['a<->b', 'not-a', 'not-b'],
               ['not-a<->b', 'a', 'not-b'], ['not-a<->b', 'not-a', 'b']]
aux_a_and_b = [['not-aANDb', 'a'], ['not-aANDb', 'b'],
               ['not-a', 'not-b', 'aANDb']]


def sort_formula(formula):
    # Move all literals to the front and all subformulas to the back.
    strings = [part for part in formula if isinstance(part, str)]
        test_file = os.path.join(data_path, f)
    else:
        raise ValueError('unknown dataset type')

    train_set = read_instances_from_file(train_file)
    train_set.add_field('task_id', [task_id] * len(train_set))
    train_set.apply(lambda x: vocab.add_word_lst(x['words']))

    dev_set = read_instances_from_file(dev_file)
    dev_set.add_field('task_id', [task_id] * len(dev_set))
    dev_set.apply(lambda x: vocab.add_word_lst(x['words']))

    test_set = read_instances_from_file(test_file)
    test_set.add_field('task_id', [task_id] * len(test_set))
    # test_set.apply(lambda x: vocab.add_word_lst(x['words']))

    task = Task(task_id, k, train_set, dev_set, test_set)
    task_lst.append(task)

logger.info('Building vocabulary...')
vocab.build_vocab()
logger.info('Finished. Size of vocab: {}.'.format(len(vocab)))

for task in task_lst:
    task.train_set.apply(lambda x: [vocab.to_index(w) for w in x['words']],
                         new_field_name='words_idx')
    task.dev_set.apply(lambda x: [vocab.to_index(w) for w in x['words']],
                       new_field_name='words_idx')
    task.test_set.apply(lambda x: [vocab.to_index(w) for w in x['words']],
                        new_field_name='words_idx')
def __init__(self):
    Task.__init__(self)
    self.action = -1
    self.action_str = TaskProtocol.look_up_key(
        TaskProtocol.task_action_dict, self.action)
def post(self):
    self.response.headers['Content-Type'] = 'text/html'
    msg = ''
    added_user = ''
    template_values = {}
    current_tb_key = ndb.Key(urlsafe=self.request.get('current_tb_key'))
    current_tb = current_tb_key.get()
    user = users.get_current_user()
    member_users = ndb.get_multi(current_tb.invited_users)
    total_user = User.query()
    total_user = total_user.fetch()
    exists = False
    b = self.request.get('button')
    owner_user = self.request.get('owner_user')
    if owner_user:
        owner_user = ndb.Key(urlsafe=owner_user)

    if b == 'Invite':
        if user.email() == current_tb.creator.get().email_address:
            if self.request.get('added_user') != 'None':
                added_user_key = self.request.get('added_user')
                added_user_key = ndb.Key(urlsafe=added_user_key)
                added_user = added_user_key.get()
                current_tb.invited_users.append(added_user_key)
                current_tb.put()
                added_user.taskBoards.append(current_tb_key)
                added_user.put()
                self.redirect('/display?key_name='
                              + str(current_tb_key.urlsafe()))
            elif self.request.get('added_user') == 'None':
                self.redirect('/display?key_name='
                              + str(current_tb_key.urlsafe()))
        else:
            self.redirect('/')

    elif b == 'Add':
        exists = False
        task = Task()
        if len(self.request.get('title').strip()) > 0:
            if current_tb.tasks is not None:
                for task1 in current_tb.tasks:
                    if task1.get().title == self.request.get('title'):
                        exists = True
                        add_msg = "Title already exists"
            if not exists:
                task.title = self.request.get('title')
                # if str(datetime.datetime.today()) > task_due:
                today_date = datetime.today().strftime("%Y-%m-%d")
                today = datetime.strptime(today_date, "%Y-%m-%d")
                task.due_date = datetime.strptime(
                    self.request.get('due_date'), "%Y-%m-%d")
                self.response.write(today_date)
                self.response.write("----------")
                self.response.write(datetime.strptime(
                    self.request.get('due_date'), "%Y-%m-%d"))
                task.checked = False
                if self.request.get('assign_user') != 'None':
                    assigned_user_key = self.request.get('assign_user')
                    assigned_user_key = ndb.Key(urlsafe=assigned_user_key)
                    task.assigned_to = assigned_user_key
                if task.due_date >= today:
                    task_key = task.put()
                    current_tb.tasks.append(task_key)
                    current_tb.put()
                    add_msg = "Task is added"
                else:
                    add_msg = "Enter a valid due date"
                self.redirect('/display?key_name='
                              + str(current_tb_key.urlsafe())
                              + '&add_msg=' + add_msg)
                # template_values = {
                #     'msg': msg,
                #     'key_name': current_tb_key.urlsafe(),
                #     'current_tb_key': current_tb_key.urlsafe(),
                #     'current_tb': current_tb,
                #     'user': user,
                #     'owner_user': owner_user,
                #     'member_users': member_users,
                #     'total_user': total_user
                # }
                # template = JINJA_ENVIRONMENT.get_template('display.html')
                # self.response.write(template.render(template_values))
            else:
                self.redirect('/display?key_name='
                              + str(current_tb_key.urlsafe())
                              + '&add_msg=' + add_msg)
        else:
            add_msg = 'Invalid title'
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe())
                          + '&add_msg=' + add_msg)

    elif b == 'Rename':
        if len(self.request.get('Name').strip()) > 0:
            Name = self.request.get('Name')
            current_tb.name = Name
            current_tb.put()
            msg = ''
            # when redirecting, redirect in both the if and else branches
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe()))
        else:
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe()))
    # delete button
    if self.request.get('button') == 'Delete':
        task_key = self.request.get('task_key')
        task_key = ndb.Key(urlsafe=task_key)
        current_tb.tasks.remove(task_key)
        current_tb.put()
        task_key.delete()
        self.redirect('/display?key_name=' + str(current_tb_key.urlsafe()))

    # checked tasks
    checked_value = self.request.get('completed')
    if checked_value:
        # it is checked
        checked_task_key = ndb.Key(urlsafe=checked_value)
        checked_task = checked_task_key.get()
        checked_task.checked = True
        checked_task.completion_date = datetime.now() + timedelta(hours=1)
        checked_task.put()
        self.redirect('/display?key_name=' + str(current_tb_key.urlsafe()))

    # remove user
    if self.request.get('button') == 'Remove':
        if self.request.get('removed_user') != 'None':
            removed_user_key_1 = self.request.get('removed_user')
            removed_user_key = ndb.Key(urlsafe=removed_user_key_1)
            removed_user = removed_user_key.get()
            # make the tasks of the removed user unassigned
            tasks = ndb.get_multi(current_tb.tasks)
            for task1 in tasks:
                if task1.assigned_to == removed_user_key.get().key:
                    task1.assigned_to = None
                    task1.put()
            # remove from the invited user list
            current_tb.invited_users.remove(removed_user_key)
            current_tb.put()
            member_users = ndb.get_multi(current_tb.invited_users)
            # remove current task board from removed user's taskboard list
            removed_user.taskBoards.remove(current_tb_key)
            removed_user.put()
            msg = ''
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe()))
        if self.request.get('removed_user') == 'None':
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe()))

    # removing the task board
    if self.request.get('button') == 'Remove the Task Board':
        tasks = ndb.get_multi(current_tb.tasks)
        if len(member_users) == 0 and len(tasks) == 0:
            ou = owner_user.get()
            ou.taskBoards.remove(current_tb_key)
            ou.put()
            current_tb_key.delete()
            self.redirect('/')
        else:
            board_msg = ('Delete all the tasks and remove all members '
                         'to remove the task board')
            self.redirect('/display?key_name='
                          + str(current_tb_key.urlsafe())
                          + '&board_msg=' + board_msg)

    if self.request.get('button') == 'Back to home':
        self.redirect('/')
def __eq__(self, other):
    # override the '==' operator
    if hasattr(other, 'action'):
        return Task.__eq__(self, other) and self.action == other.action
    else:
        return Task.__eq__(self, other)
def add_task(self, task_text, id):
    self.tasks.append(Task(task_text, id))
def __repr__(self):
    return "[%s] \n %s" % (self.action_str, Task.__repr__(self))
def __init__(self, config):
    Task.__init__(self, 'generate_file_stat_task')
    self._config = config
    self._stat_cache = {}
def __init__(self, invoker, original_changed_files, changed_files):
    Task.__init__(self, 'gradle_aapt_task')
    self._invoker = invoker
    self._original_changed_files = original_changed_files
    self._changed_files_ref = changed_files
def new_task(self):
    task = Task()
    task.set_taskname(input("Task Name: "))
    task.set_taskdesc(input("Description: "))
    self.taskboard.append(task)
def __init__(self):
    Task.__init__(self, 'read_project_info_task')
def test_show_results_return(self):
    res = Task.select().where(Task.employee_name == self.test_ename)
    task_search.reset_results()
    task_search.create_result_list(res)
    with patch('builtins.input', side_effect=['R']):
        self.assertEqual(task_search.show_results(), 1)
def __init__(self, config):
    Task.__init__(self, 'generate_project_info_task')
def importDag(self, inputFilePath):
    with open(inputFilePath, "r") as input:
        lines = input.readlines()

    noOfTasks = int(''.join(char for char in lines[0] if char.isdigit()))

    self.tasks = []
    for i in range(0, noOfTasks + 2):
        self.tasks.append(Task(i, 0, 0, 0, 0))

    currentLineIndex = 3
    for i in range(1, noOfTasks + 2):
        detailsOfRow = [
            float(number) for number in lines[currentLineIndex].split()
        ]
        currentTask = self.tasks[int(detailsOfRow[0])]
        currentTask.computationRequired = detailsOfRow[1]
        currentTask.storageRequired = detailsOfRow[2]
        currentTask.memoryRequired = detailsOfRow[3]

        # number of predecessors of the current task
        noOfPredecessors = int(detailsOfRow[4])
        currentLineIndex += 1

        for j in range(0, noOfPredecessors):
            precedentConstraints = [
                float(number) for number in lines[currentLineIndex].split()
            ]
            currentPrecedence = self.tasks[int(precedentConstraints[0])]
            currentPrecedence.addEdge(currentTask, precedentConstraints[1])
            currentLineIndex += 1

    # line of baseDeadline
    self.baseDeadline = float(lines[currentLineIndex].split()[1])
    currentLineIndex += 1
    # line of deadline
    self.deadline = float(lines[currentLineIndex].split()[1])
    currentLineIndex += 1
    # line of arrivalTime
    self.arrivalTime = float(lines[currentLineIndex].split()[1])
    currentLineIndex += 1
    # line of ccr
    self.ccr = float(lines[currentLineIndex].split()[1])
    currentLineIndex += 1
    # line of alpha
    self.alpha = float(lines[currentLineIndex].split()[1])
    currentLineIndex += 1
    # line of height
    self.height = int(lines[currentLineIndex].split()[1])
    currentLineIndex += 1

    for i in range(0, self.height):
        newLayer = []
        detailsOfRow = [
            int(number) for number in lines[currentLineIndex].split()
        ]
        for taskId in detailsOfRow:
            newLayer.append(self.tasks[taskId])
            self.tasks[taskId].layerId = i
        self.layers.append(newLayer)
        currentLineIndex += 1
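# The file layout importDag() appears to assume, reconstructed from the
# parsing logic above (the labels and values below are invented for
# illustration; only the column order is implied by the code):
#
#   line 0           the number of tasks, e.g. "tasks 3"
#   lines 1-2        skipped (parsing starts at line index 3)
#   per-task rows    "id computation storage memory nPredecessors",
#                    each followed by nPredecessors rows of
#                    "predecessorId edgeWeight"
#   then single rows "baseDeadline <v>", "deadline <v>", "arrivalTime <v>",
#                    "ccr <v>", "alpha <v>", "height <n>"
#   final n rows     the task ids of each layer, one layer per row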
    exc = None
    try:
        if not os.path.exists(dir + bin):
            raise Exception('path ' + dir + bin + ' does not exist')
        bin_id = get_binary_id(sql, bin)
        os_target.analysis_binary_instr_linear(sql, dir, bin, pkg_id, bin_id)
        condition = 'pkg_id=' + Table.stringify(pkg_id) \
                    + ' and bin_id=' + Table.stringify(bin_id)
        sql.update_record(tables['binary_list'], {'callgraph': False},
                          condition)
        sql.commit()
    except Exception as err:
        exc = sys.exc_info()

    if (ref and package.dereference_dir(dir, ref)) or unpacked:
        package.remove_dir(dir)
    if exc:
        raise exc[1], None, exc[2]

subtasks['BinaryInstr'] = Task(
    name="Collect Binary Instruction Usage",
    func=BinaryInstr,
    arg_defs=["Package Name", "Binary Path", "Unpack Path"],
    job_name=lambda args: "Collect Binary Instruction Usage: "
                          + args[1] + " in " + args[0])
def __init__(self, cache_dir, ignore=None):
    Task.__init__(self, 'clean_all_cache_task')
    self._cache_dir = cache_dir
    self._ignore = ignore
import numpy as np
import tensorflow as tf  # TF1-style session API

from agents.agent import Agent
from stat_collector import StatCollector
from task import Task
from utils import run_episode, plot_training_graphs

# Params
num_episodes = 1000
evaluate_every = 10
model_file = './model.ckpt'

with tf.Session() as sess:
    # Setup
    task = Task(
        init_pose=np.array([0., 0., 10., 0., 0., 0.]),
        target_pos=np.array([0., 0., 10.]),
    )
    stat = StatCollector()
    agent = Agent(task, sess, stat)
    saver = tf.train.Saver()

    # Run Training
    for i_episode in range(num_episodes):
        stat.tick()

        # Train policy and Q-Network
        score, steps = run_episode(sess, agent, task, train=True)
        stat.scalar('episode_steps_train', steps)
        stat.scalar('episode_reward_train', score)

        print('Episode = {:4d}, score train = {:7.3f}, steps = {}'
              .format(i_episode, score, steps))
def make_task_to_copy():
    date = datetime(2021, 3, 5, 12, 30, 0)
    date = date.replace(tzinfo=tzutc())
    return Task('copied task', 987654321, 'new description', True, date,
                False)
def make_task_list():
    date = datetime(2020, 3, 5, 12, 30, 0)
    date = date.replace(tzinfo=tzutc())
    return [Task('task1', 123456789, 'description1', False, date, True)]
def main():
    if len(sys.argv) != 2:
        print 'Please provide a path to a model data directory.'
        print ('The script will load the newest model data from the directory, '
               'then continue to improve that model')
        sys.exit(0)

    model_directory = sys.argv[1]
    existing_models = sorted(glob(os.path.join(model_directory, '*.rlmdl')))

    if existing_models:
        newest_model_name = existing_models[-1]
        iteration_count = int(newest_model_name[-12:-6]) + 1
        print 'Loading model {}'.format(newest_model_name)
        newest_model = open(newest_model_name, 'r')
        agent = pickle.load(newest_model)
    else:
        net = buildNetwork(Environment.outdim,
                           Environment.outdim + Environment.indim,
                           Environment.indim)
        agent = OptimizationAgent(net, PGPE())
        iteration_count = 1

    environment = Environment(LOCAL_HOST, PORT, PATH_TO_SCENE)
    task = Task(environment)

    experiment = EpisodicExperiment(task, agent)

    def signal_handler(signal, frame):
        print 'Exiting gracefully'
        environment.teardown()
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    while True:
        time.sleep(1)
        print '>>>>> Running iteration {}'.format(iteration_count)

        # NOTE this weird stuff is hacky, but we need it to plug in our
        # autosave stuff properly. Took a long time to figure this out.
        experiment.optimizer.maxEvaluations = \
            experiment.optimizer.numEvaluations + experiment.optimizer.batchSize

        try:
            experiment.doEpisodes()
        except Exception as e:
            print 'ERROR RUNNING SIMULATION: \n{}'.format(e)
            environment.teardown()
        else:
            if iteration_count % AUTOSAVE_INTERVAL == 0:
                filename = str(iteration_count).zfill(6) + '.rlmdl'
                filename = os.path.join(model_directory, filename)
                f = open(filename, 'w+')
                print 'Saving model to {}'.format(filename)
                pickle.dump(agent, f)

            iteration_count += 1
            print 'Iteration finished <<<<<'
            max_avg_score = avg_score
        if i_episode % 20 == 0:
            print('\rEpisode {}/{} | Max Average Score: {} | '
                  'Current Average Score: {}'.format(
                      i_episode, num_episodes, max_avg_score, avg_score),
                  end='')
            sys.stdout.flush()
    return scores


# Task setting (hover task)
init_pose = np.array([0., 0., .1, 0., 0., 0.])  # start: x=0, y=0, z=0.1
target_pos = np.array([0., 0., 10.])            # goal: x=0, y=0, z=10
task = Task(init_pose=init_pose, target_pos=target_pos)

# Hyper parameters
num_episodes = 500  # max number of episodes to learn from
max_steps = 200     # max steps in an episode
gamma = 0.95
tau = 0.001

# Network parameters
learning_rate_actor = 1e-5
learning_rate_critic = 1e-5

# Memory parameters
memory_size = 10000
batch_size = 64