def sample_plan(self, plan_id, plan_length, rho, views, view_values, view_costs, current_view_id):
    """Sample a Plan of `plan_length` views, drawn without replacement.

    Each view is drawn from a joint distribution over a value pmf and a
    cost pmf; after every draw the cost pmf is re-conditioned on the view
    just chosen, and the drawn view is removed from the value pmf.

    :param plan_id: identifier for the new Plan
    :param plan_length: number of views to sample
    :param rho: cost-conditioning parameter forwarded to _generate_cost_pmf
    :param views: iterable of view objects (each with an .ID attribute)
    :param view_values: per-view values used to build the value pmf
    :param view_costs: per-view costs used to build the cost pmf
    :param current_view_id: view the robot is currently at (conditioning point)
    :return: the sampled Plan
    """
    remaining_views = dict()
    for v in views:
        remaining_views[v.ID] = v
    # current_view_id = remaining_views.keys()[0] NOW FUNCTION ARGUMENT
    plan = Plan(plan_id)
    # pmf for values
    value_pmf = self._generate_value_pmf(view_values)
    # pmf for costs, conditioned on the current view
    cost_pmf = self._generate_cost_pmf(current_view_id, remaining_views.keys(), rho, view_costs)
    # joint dist
    joint = make_joint(value_pmf, cost_pmf)
    joint.normalize()
    for j in range(plan_length):
        x = joint.random()
        plan.append(remaining_views[x])
        remaining_views.pop(x)  # sample without replacement
        # adapt both pmfs: value and cost
        value_pmf.unset(x)
        value_pmf.normalize()
        cost_pmf = self._generate_cost_pmf(x, remaining_views.keys(), rho, view_costs)
        # re-generate joint dist
        joint = make_joint(value_pmf, cost_pmf)
        joint.normalize()
    return plan
def all_plans(plans, configs):
    """Print each plan's tree and evaluate it against every config.

    Returns a dict mapping plan file name -> all_configs result.
    """
    outcome = {}
    for pf in plans:
        banner = (' PLAN %s ' % pf).center(80, '=')
        print('\n' + banner)
        Plan.parse(pf).printtree()
        outcome[pf] = all_configs(pf, configs)
    return outcome
def test_update3(self):
    """Two timed lines collapse into one Routine; each entry's duration is
    the gap to the next line (the last entry gets zero duration)."""
    p = Plan("today")
    p.update("07:00 aufstehen\n08:00 essen")
    # expect exactly one Routine ("1R") in the plan's step list
    TestHelper.test_listByInstance(self,p.step_list,"1R")
    t = p.step_list[0]
    # 07:00 -> 08:00 is a one-hour gap; the final entry has no successor
    self.assertEqual(t.step_list[0],Entry("01:00","aufstehen"))
    self.assertEqual(t.step_list[1],Entry("00:00","essen"))
class Person:
    """Node in a boss/subordinate tree for a party-planning optimisation.

    Each person memoizes the best Plan for both of their states
    (attending vs. not attending) so every subtree is solved only once.
    """

    def __init__(self, name, boss_name, score):
        self.name = name
        self.score = score
        self.boss_name = boss_name
        self.children = []
        self.attending_plan_cache = None
        self.not_attending_plan_cache = None

    def add_child(self, child):
        """Register a direct subordinate of this person."""
        self.children.append(child)

    def attending_plan(self):
        """Best plan where this person attends (children must stay home)."""
        if not self.attending_plan_cache:
            self.attending_plan_cache = Plan(self, True)
            for kid in self.children:
                self.attending_plan_cache.merge(kid.not_attending_plan())
        return self.attending_plan_cache

    def not_attending_plan(self):
        """Best plan where this person skips (children pick their best state)."""
        if not self.not_attending_plan_cache:
            self.not_attending_plan_cache = Plan(self, False)
            for kid in self.children:
                self.not_attending_plan_cache.merge(kid.best_plan())
        return self.not_attending_plan_cache

    def best_plan(self):
        """The better of this person's attending / not-attending plans."""
        attending = self.attending_plan()
        skipping = self.not_attending_plan()
        return attending if attending.is_better_than(skipping) else skipping
def __init__(self,h): self.problem = BlocksWorld() # start with an empty plan self.plan = Plan() # h is a "heuristic" object self.h=h
def setUp(self):
    """Fixtures: two plans (name, price, website cap), two websites with the
    same URL, and one customer (name, phone, email)."""
    self.single_plan = Plan('Single', 49, 1)
    self.plus_plan = Plan('Plus', 99, 3)
    self.website_1 = Website('https://google.com')
    self.website_2 = Website('https://google.com')
    self.customer_1 = Customer('customer_1', '123456789', '*****@*****.**')
def test_update4(self):
    """Three timed lines collapse into one Routine with gap-based durations."""
    text = "07:00 aufstehen\n08:00 essen\n09:00 Zaehneputzen"
    p = Plan("today")
    p.update(text)
    # expect exactly one Routine ("1R") in the plan's step list
    TestHelper.test_listByInstance(self,p.step_list,"1R")
    t = p.step_list[0]
    # one-hour gaps between consecutive lines; last entry has zero duration
    self.assertEqual(t.step_list[0],Entry("01:00","aufstehen"))
    self.assertEqual(t.step_list[1],Entry("01:00","essen"))
    self.assertEqual(t.step_list[2],Entry("00:00","Zaehneputzen"))
def DestroyPlan(command):
    """Cancel the named scheduled task by clearing its crontab block.

    :param command: name of the cron block to clear
    :return: None
    """
    scheduler = Plan(command)
    scheduler.run('clear')
def CreatePlan(command):
    """Register a daily cron job that runs ``node <path>/index.js <command>``.

    :param command: cron block name; also forwarded to index.js as an argument
    :return: None
    """
    scheduler = Plan(command)
    job = 'node ' + path + '/index.js ' + command
    scheduler.command(job, every='1.day', at='16:51')
    scheduler.run('write')
class TestPlan(unittest.TestCase):
    """Unit tests for Plan.new_website_allowed (website-count cap)."""

    def setUp(self):
        # Plus plan: $99, at most 3 websites
        self.plus_plan = Plan('Plus', 99, 3)

    def test_new_website_allowed(self):
        # 2 websites is under the 3-site cap
        self.assertTrue(self.plus_plan.new_website_allowed(2))

    def test_new_website_not_allowed(self):
        # 4 websites exceeds the cap
        self.assertFalse(self.plus_plan.new_website_allowed(4))
def main():
    """CLI entry point: bulk-load a 'plan de mejoramiento' CSV set into Odoo.

    Parses connection options, validates required credentials, activates the
    avance wizard, then loads the plan file. Fix: the "-c" option's help text
    contained a raw line break that split the string literal across two
    physical lines (a syntax error); it is rejoined into one literal.
    """
    usage = "import file csv to Odoo: %prog [options]"
    parser = OptionParser(usage)
    parser.add_option("-N", "--db_name", dest="db_name", help="OpenERP database name")
    parser.add_option("-U", "--db_user", dest="db_user", help="OpenERP database user")
    parser.add_option("-P", "--db_password", dest="db_password", help="OpenERP database password")
    parser.add_option("-H", "--host_openERP", dest="host_openERP", help="OpenERP server host", default="http://localhost")
    parser.add_option("-K", "--port_openERP", dest="port_openERP", help="OpenERP server port", default="8069")
    parser.add_option("-p", "--path_openERP", dest="path_openERP", help="path of file for uploading", default="plan_contraloria_bogota/")
    parser.add_option("-a", "--avance_openERP", dest="avance_openERP", help="tiene avance el conjunto de script. valores 1 para verdadero 0 para falso", default="1")
    parser.add_option("-b", "--state_accion_openERP", dest="state_accion_openERP", help="state for acción")
    parser.add_option("-c", "--crear_jefes_openERP", dest="crear_jefes_openERP", help="Crear los jefes dependencia en el conjunto de script. valores 1 para verdadero 0 para falso", default="0")
    parser.add_option("-d", "--debug", dest="debug", help="Mostrar mensajes de debug utilize 10", default=10)
    (options, args) = parser.parse_args()
    _logger.setLevel(int(options.debug))
    # Required-parameter validation (optparse has no required=, so check here)
    if not options.db_name:
        parser.error('Parametro db_name no especificado')
    if not options.db_user:
        parser.error('Parametro db_user no especificado')
    if not options.db_password:
        parser.error('Parametro db_password no especificado')
    if not options.host_openERP:
        parser.error('Parametro model_openERP no especificadon')
    # Start of the script: open the Odoo connection
    connect = Connection(options)
    odoo = connect.get_connection()
    # Create system parameters so progress records (avances) can be created
    today = fields.Datetime.now()
    wizard = odoo.model('plan_mejoramiento.wizard.activar_avance').create({
        'fecha_inicio': today,
        'fecha_fin': today,
    })
    # create the avance date ranges
    wizard.activar_avance()
    _logger.debug('**********************************')
    _logger.debug('*** Inicio Script Cargue Masivo ***')
    _logger.debug('**********************************')
    _logger.debug('\n')
    # Load the plan file itself
    plan = Plan(odoo, _logger, options)
    plan.open_file_plan()
    _logger.debug('\n')
    _logger.debug('**********************************')
    _logger.debug('*** Fin Script Cargue Masivo ***')
    _logger.debug('**********************************')
def random_plan(self): p = Plan() # Choose start condition AND INSTANTIATE IT: start_condition = random.choice(p.Conditionals)() start_condition.height = 1 p.action = start_condition p.this_conditionals.append(start_condition) done = False while not done: done = True # Update conditionals if not marked (marked = already dealt with) # and stop when hitting the max depth for c in p.this_conditionals: if c.marked == 0: done = False c.marked = 1 # Add a random expression: expression = random.choice(p.Expressions)() expression.obs1 = random.choice(p.Observations) expression.obs2 = random.choice(p.Observations) expression.depth = c.depth + 1 expression.marked = 1 expression.set_map(p) c.expression = expression # Must add terminal actions if we hit the max depth (minus 2) if c.depth == self.max-2: for key in c.options.keys(): axn = random.choice(p.Actions) axn.depth = c.depth + 1 c.options[key] = axn # Otherwise, we can add either a terminal action OR another conditional: else: for key in c.options.keys(): condition_or_action = random.random() if condition_or_action < .5: new_c = random.choice(p.Conditionals)() new_c.depth = c.depth + 1 c.options[key] = new_c p.this_conditionals.append(new_c) else: c.options[key] = random.choice(p.Actions) return p
def plan_direct(queue, code, init, Xname, X, Y, tag=None):
    """Build a Plan that runs user-supplied OpenCL `code` once per vector pair.

    `init` (declarations) and `code` (computations) are rendered into the
    kernel template below; work item n reads ragged-array vector X[n] under
    the name `Xname` and writes Y[n].

    :return: Plan wrapping the compiled kernel (full_args kept alive on it)
    """
    from . import ast_conversion
    assert len(X) == len(Y)
    N = len(X)
    text = """
        ////////// MAIN FUNCTION //////////
        __kernel void fn(
            __global const int *${IN}starts,
            __global const ${INtype} *${IN}data,
            __global const int *${OUT}starts,
            __global ${OUTtype} *${OUT}data
        )
        {
            const int n = get_global_id(0);
            if (n >= ${N}) return;
            __global const ${INtype} *${arg} = ${IN}data + ${IN}starts[n];
            __global ${OUTtype} *${OUT} = ${OUT}data + ${OUT}starts[n];
            /////vvvvv USER DECLARATIONS BELOW vvvvv
${init}
            /////vvvvv USER COMPUTATIONS BELOW vvvvv
${code}
            // END OF FUNC: put nothing after user code, since it can return
        }
        """
    textconf = dict(
        init=_indent(init, 12),
        code=_indent(code, 12),
        N=N,
        arg=Xname,
        IN=ast_conversion.INPUT_NAME,
        INtype=X.cl_buf.ocldtype,
        OUT=ast_conversion.OUTPUT_NAME,
        OUTtype=Y.cl_buf.ocldtype,
    )
    text = Template(text, output_encoding='ascii').render(**textconf)
    full_args = (X.cl_starts, X.cl_buf, Y.cl_starts, Y.cl_buf)
    _fn = cl.Program(queue.context, text).build().fn
    _fn.set_args(*[arr.data for arr in full_args])
    gsize = (N, )
    rval = Plan(queue, _fn, gsize, lsize=None, name="cl_direct", tag=tag)
    rval.full_args = full_args  # prevent garbage-collection
    return rval
def main():
    """Demo walkthrough: plans, websites and customers interacting.

    Exercises subscribing, the per-plan website cap, plan changes, and an
    unlimited (cap == -1) plan.
    """
    # Initialize different plans (name, price, max websites; -1 = unlimited)
    single_plan = Plan('Single', 49, 1)
    plus_plan = Plan('Plus', 99, 3)
    infinite_plan = Plan('Infinite', 249, -1)
    # Initialize multiple websites
    website_1 = Website('https://website_1.com')
    website_2 = Website('https://website_2.com')
    website_3 = Website('https://website_3.com')
    website_4 = Website('https://website_4.com')
    # Initialize multiple customers
    customer_1 = Customer('customer_1', '123456789', '*****@*****.**')
    customer_2 = Customer('customer_2', '123456789', '*****@*****.**')
    customer_3 = Customer('customer_3', '123456789', '*****@*****.**')
    # customer_1 subscribed for single_plan
    customer_1.add_subscription(single_plan)
    print("{} has subscribed for {} plan".format(customer_1, customer_1.subscription.plan))
    # customer_1 added one website
    customer_1.add_website(website_1)
    print("{} has added website {} as per the {} plan".format(customer_1, \
        customer_1.websites, customer_1.subscription.plan))
    # customer_1 can not add more website in single_plan
    customer_1.add_website(website_2)
    print("{} can't add website {} as per the {} plan".format(customer_1, \
        website_2, customer_1.subscription.plan))
    # customer_1 can change plan from single_plan to plus_plan
    customer_1.change_plan(plus_plan)
    print("{} has changed his current plan {} to {} plan".format(customer_1, \
        single_plan, customer_1.subscription.plan))
    # customer_2 subscribe for infinite_plan
    customer_2.add_subscription(infinite_plan)
    # customer_2 can add multiple websites (no cap on the infinite plan)
    customer_2.add_website(website_1)
    customer_2.add_website(website_2)
    customer_2.add_website(website_3)
    customer_2.add_website(website_4)
    print("{} has added four websites {} under infinite plan".format(customer_2, \
        customer_2.websites))
def test_random_token_holder():
    """Creating a Plan must hand the token to exactly one of the two agents."""
    print("testing test_random_token_holder...\n")
    environment = Environment(yaml_file="utils/simple.yaml")
    agent1 = Agent(mode = "normal", start_pos = (0, 0), goal_pos = (10.5, 5.5),
                   environment = environment, goal_dist = 0.3, rrt_iters = 200)
    agent2 = Agent(mode = "normal", start_pos = (2, 3), goal_pos = (0, 4),
                   environment = environment, goal_dist = 0.3, rrt_iters = 200)
    # neither agent holds the token before planning starts
    assert not agent1.token_holder
    assert not agent2.token_holder
    plan = Plan(agents = [agent1, agent2],
                env = environment,
                dma_indiv = sol_dma_individual,
                dma_coop = None,
                spin_rate = 10  # Hz
                )
    # exactly one token holder afterwards (XOR)
    assert agent1.token_holder or agent2.token_holder
    assert not (agent1.token_holder and agent2.token_holder)
def __init__(self, commands=None, script=None, crons=None, config=None,
             need_sudo=False, supervisor_conf_path="/etc/supervisord.d/"):
    """Collect cron/script/command task specs and prepare config + Plan.

    :param commands: raw command task specs (normalised by get_commands)
    :param script: raw script task specs (normalised by get_scripts)
    :param crons: raw cron task specs (normalised by get_crons)
    :param config: extra configuration passed to Config
    :param need_sudo: whether generated tasks should be run with sudo
    :param supervisor_conf_path: where supervisor config files are written
    """
    self.need_sudo = need_sudo
    self.supervisor_conf_path = supervisor_conf_path
    self.crons = self.get_crons(crons)
    self.scripts = self.get_scripts(script)
    self.commands = self.get_commands(commands)
    # NOTE(review): self.__module__ is a module-name string, not a file path,
    # so abspath() resolves it relative to the CWD — confirm this is intended
    # (os.path.abspath(__file__) is the usual pattern).
    self.home = os.path.dirname(os.path.abspath(self.__module__))
    self.config = Config(self.home, config)
    self.cron = Plan(self.config.project)
    self.init_tasks()
def run_test(self, steps, ordering_constraints, causal_links, test_name=""):
    """Check a partial-order plan and print a report.

    Given the steps, ordering_constraints and causal_links, determine whether
    the plan is complete, consistent and a solution; if it is a solution,
    also compute its linearizations.

    :param test_name: optional label printed with the report, convenient for
        telling results apart when run in bulk
    :return: (complete, consistent, solution, linearizations) tuple
    """
    p = Plan(steps, ordering_constraints, causal_links)
    complete = self.isComplete(p)
    consistent = self.isConsistent(p)
    solution = self.isSolution(p)
    linearizations = []
    # linearizations only exist for an actual solution
    if solution:
        linearizations = self.createLinearization(p)
    print("=======================================================")
    print("This plan is: ", test_name)
    print("Complete: ", complete)
    print("Consistent: ", consistent)
    print("Solution: ", solution)
    if solution:
        print("Linearization: ")
        for linearization in linearizations:
            print("\t", linearization)
    return complete, consistent, solution, linearizations
def single(planfile, configfile):
    """Parse one plan and config, print the best distribution plan's tree,
    and return its cost (toward the config's 'CL1' destination)."""
    plan, config = Plan.parse(planfile), Config.parse(configfile)
    distplan = best(plan, config)
    cost = distplan.cost(dest=config['CL1'])
    print('Best cost:', cost)
    distplan.printtree(dest=config['CL1'])
    return cost
def plan_copy(queue, src, dst, tag=None):
    """Build a Plan that copies OpenCL array `src` into `dst` elementwise.

    Shapes must match (ValueError otherwise); mismatched buffer sizes or
    strides are unsupported (NotImplementedError).
    """
    if not (src.shape == dst.shape):
        raise ValueError()
    if (src.data.size != dst.data.size):
        raise NotImplementedError('size', (src, dst))
    if (src.strides != dst.strides):
        raise NotImplementedError('strides', (src, dst))
    # XXX: only copy the parts of the buffer that are part of the logical Array
    # XXX: use the elemwise kernel generator above
    config = {'src_type': src.ocldtype,
              'dst_type': dst.ocldtype,
              'src_offset': int(src.offset / src.dtype.itemsize),
              # BUG FIX: the destination offset must be scaled by the
              # *destination's* itemsize; the original divided dst.offset by
              # src.dtype.itemsize, which is wrong whenever the dtypes differ
              # (the kernel explicitly allows src_type != dst_type).
              'dst_offset': int(dst.offset / dst.dtype.itemsize),
              }
    _fn = cl.Program(queue.context, """
        __kernel void fn(__global const %(src_type)s *src,
                         __global %(dst_type)s *dst)
        {
            dst[get_global_id(0) + %(dst_offset)s]
                = src[get_global_id(0) + %(src_offset)s];
        }
        """ % config).build().fn
    _fn.set_args(src.data, dst.data)
    L, = src.shape
    return Plan(queue, _fn, (L,), None, name='copy', tag=tag)
def test_removeElement(self):
    """splitRoutine(1,2) replaces the middle routine with its parts while
    preserving the plan's end time and the flanking routines."""
    r1 = self.routine.clone()
    r2 = self.routine.clone()
    p = Plan("Today")
    p.add(r1)
    p.add(self.routine)
    p.add(r2)
    endBefore = p.end
    # split the routine at index 1 into 2 pieces
    p.splitRoutine(1,2)
    self.assertEqual(endBefore,p.end)
    self.assertEqual(len(p.step_list),4)
    # outer routines keep their positions (now at indices 0 and 3)
    self.assertEqual(p.step_list[0].theme,r1.theme)
    self.assertEqual(p.step_list[3].theme,r2.theme)
def test_pEqual(self):
    """Plans with equal content compare equal even when their names and
    identities differ (value equality, not identity)."""
    p1 = Plan("1")
    p2 = Plan("2")
    p1.add(self.routine)
    p2.add(self.routine)
    self.assertEqual(p1,p2)
    self.assertNotEqual(id(p1),id(p2))
def plan_elemwise(queue, body, inputs, outputs):
    """Reference elementwise-kernel builder: substitute ``$IN_i`` / ``$OUT_i``
    placeholders in `body` with strided array accesses and compile.

    Only a single output of ndim <= 3 is supported.
    FIX: the regex patterns are now raw strings — ``'\\$IN_%i'`` in a plain
    string is an invalid escape sequence (DeprecationWarning in Python 3,
    slated to become a SyntaxError).
    """
    # THIS IS A REFERENCE IMPLEMENTATION
    if len(outputs) > 1:
        raise NotImplementedError()
    if outputs[0].ndim > 3:
        raise NotImplementedError()
    # this will be modified many times
    full_body = body
    for anum, arr in enumerate(inputs):
        varname = r'\$IN_%i' % anum
        ptrname = 'IPTR_%i' % anum
        indexes = []
        for jj in range(arr.ndim):
            indexes.append('gid%i * %i' % (jj, arr.itemstrides[jj]))
        repl = '%s[%s]' % (ptrname, ' + '.join(indexes))
        full_body = re.sub(varname, repl, full_body)
    for anum, arr in enumerate(outputs):
        varname = r'\$OUT_%i' % anum
        ptrname = 'OPTR_%i' % anum
        indexes = []
        for jj in range(arr.ndim):
            indexes.append('gid%i * %i' % (jj, arr.itemstrides[jj]))
        repl = '%s[%s]' % (ptrname, ' + '.join(indexes))
        full_body = re.sub(varname, repl, full_body)
    #print full_body
    params = []
    params.extend(
        ['__global const %s * IPTR_%s' % (arr.ocldtype, inum)
         for inum, arr in enumerate(inputs)])
    params.extend(
        ['__global %s * OPTR_%s' % (arr.ocldtype, inum)
         for inum, arr in enumerate(outputs)])
    joined_params = ', '.join(params)
    text = """
        __kernel void fn(
            %(joined_params)s
        )
        {
            const int gid0 = get_global_id(0);
            const int gid1 = get_global_id(1);
            const int gid2 = get_global_id(2);
            %(full_body)s
        }
        """ % locals()
    # TODO: support for larger arrays than workgroup size
    _fn = cl.Program(queue.context, text).build().fn
    _fn_args = (queue, outputs[0].shape, None,)
    _fn_args = _fn_args + tuple([arr.data for arr in inputs + outputs])
    return [Plan(locals())]
def create():
    """POST handler: create a Plan row from the request's JSON body.

    Any client-supplied "id" is discarded so the database assigns the
    primary key itself.
    """
    body = request.get_json()
    # idiom: pop with a default replaces the `if "id" in body.keys(): del` dance
    body.pop("id", None)
    p = Plan(**body)
    session.add(p)
    session.commit()
    return Response(p).as_response()
def add(name: str, due_date: str = None):
    """Add a plan into the database.

    BUG FIX: the original default was ``arrow.utcnow()``, which Python
    evaluates once at import time — every defaulted call reused the process
    start time. ``None`` now means "now, computed at call time".
    """
    try:
        when = arrow.utcnow() if due_date is None else arrow.get(due_date)
        plan = Plan(name, when)
        db.write(db.read() + [plan])
        typer.echo(plan)
    except arrow.ParserError:
        typer.echo('The time was not correctly formatted.')
def plan_direct(queue, code, init, Xname, X, Y, tag=None):
    """Build a Plan that runs user-supplied OpenCL `code` once per vector pair.

    `init` (declarations) and `code` (computations) are rendered into the
    kernel template below; work item n reads ragged-array vector X[n] under
    the name `Xname` and writes Y[n].

    :return: Plan wrapping the compiled kernel (full_args kept alive on it)
    """
    from . import ast_conversion
    assert len(X) == len(Y)
    N = len(X)
    text = """
        ////////// MAIN FUNCTION //////////
        __kernel void fn(
            __global const int *${IN}starts,
            __global const ${INtype} *${IN}data,
            __global const int *${OUT}starts,
            __global ${OUTtype} *${OUT}data
        )
        {
            const int n = get_global_id(0);
            if (n >= ${N}) return;
            __global const ${INtype} *${arg} = ${IN}data + ${IN}starts[n];
            __global ${OUTtype} *${OUT} = ${OUT}data + ${OUT}starts[n];
            /////vvvvv USER DECLARATIONS BELOW vvvvv
${init}
            /////vvvvv USER COMPUTATIONS BELOW vvvvv
${code}
            // END OF FUNC: put nothing after user code, since it can return
        }
        """
    textconf = dict(init=_indent(init, 12),
                    code=_indent(code, 12),
                    N=N,
                    arg=Xname,
                    IN=ast_conversion.INPUT_NAME,
                    INtype=X.cl_buf.ocldtype,
                    OUT=ast_conversion.OUTPUT_NAME,
                    OUTtype=Y.cl_buf.ocldtype,
                    )
    text = Template(text, output_encoding='ascii').render(**textconf)
    full_args = (X.cl_starts, X.cl_buf, Y.cl_starts, Y.cl_buf)
    _fn = cl.Program(queue.context, text).build().fn
    _fn.set_args(*[arr.data for arr in full_args])
    gsize = (N,)
    rval = Plan(queue, _fn, gsize, lsize=None, name="cl_direct", tag=tag)
    rval.full_args = full_args  # prevent garbage-collection
    return rval
def __init__(self, game, y, x):
    """Initialise the NPC: position, plan/behaviour, dialogue tree, and a
    randomly generated identity (gender, name, appearance)."""
    super(NPC, self).__init__(game)
    self.y = y
    self.x = x
    self.colour = Constants.COLOUR_WHITE
    self.path = []
    self.square = None
    self.plan = Plan(self)
    self.currentBehaviour = DefaultBehaviour(self)
    self.alive = True
    self.killer = False
    self.dialogue = dialogue.Dialogue(self)
    standardDialogueChoice1 = dialogue.DialogueChoice("Hello!", "Hello to you too!")
    standardDialogueChoice2 = dialogue.DialogueChoice("My name is Kate!", "Fascinating.")

    def responseFunction3(npc, response):
        # Introducing themselves also records this NPC in the player's notebook.
        npc.game.player.notebook.addToKnownNpcs(self)
        actualResponse = "My name is " + npc.firstName + " " + npc.lastName
        actualResponse += ". I live in house number " + str(npc.square.house.number)
        actualResponse += "."
        return actualResponse

    standardDialogueChoice3 = dialogue.DialogueChoice("Who are you?", "", responseFunction3)
    standardDialogueChoice4 = dialogue.DialogueChoice("No, hello to YOU!", "We're done talking, freakshow.")
    secondNode = dialogue.DialogueNode()
    secondNode.addChoice(standardDialogueChoice4)
    # "Who are you?" is only offered while the NPC is still unknown.
    choicePredicate3 = lambda: not self.game.player.notebook.isNpcKnown(self)
    dialogueRoot = dialogue.DialogueNode()
    dialogueRoot.addChoice(standardDialogueChoice1, None, secondNode)
    dialogueRoot.addChoice(standardDialogueChoice2)
    dialogueRoot.addChoice(standardDialogueChoice3, choicePredicate3)
    self.dialogue.setRootNode(dialogueRoot)
    # Fluffy, plot stuff
    self.gender = random.choice([Gender.MALE, Gender.FEMALE])
    self.firstName = "Dave"
    if self.gender == Gender.MALE:
        self.firstName = names.getMaleFirstName()
    else:
        self.firstName = names.getFemaleFirstName()
    self.lastName = names.getLastName()
    self.eyeColour = random.choice(["green", "blue", "brown"])
    self.hairColour = random.choice(["brown", "red", "blonde"])
    self.description = "They have " + self.eyeColour + " eyes and " + self.hairColour + " hair."
    # Emotions and states
    # TODO: Something with this?
    self.scared = False
    self.answeringDoor = False
def getCurrentPlan(self):
    """Re-plan in the current environment and return the resulting Plan."""
    # Generate plan in current enviroment
    self.generatePlans()
    # Extract the EUROPA Plan directly from the PLASMA Database
    europa_log = self.europa.planDatabaseToString()
    plan = Plan(europa_log)
    return plan
def createPlan(self, string: str):
    """Build a Plan from a spec string of 'E' (Entry) and 'R' (Routine) codes.

    :param string: per-character element codes; other characters are ignored
    :return: (plan, special_element) where special_element is the last Entry
        that addToPlan flagged (or None)
    """
    plan = Plan("today")
    special_element = None
    for i, e in enumerate(string):
        if e == 'E':
            ele = addToPlan(self, plan, i, string, Entry)
            # idiom fix: identity comparison instead of `not ele == None`
            if ele is not None:
                special_element = ele
        elif e == "R":
            addToPlan(self, plan, i, string, Routine)
    return plan, special_element
def run(mk, rm, mod, find, det, pg, p_opt):
    """Dispatch one CLI option (make/remove/modify/find/detail) against the
    todo DB, or fall through to showing the plan (page `pg`, print option
    `p_opt`). Exactly one mutating option is honoured per invocation.
    """
    db = DB()
    cliOption = None
    printOption = None
    # Check which option is given
    if mk:
        cliOption = Make(db, mk)
    elif rm:
        cliOption = Remove(db, rm)
    elif mod:
        cliOption = Modify(db, mod)
    elif find:
        cliOption = Find(db, find)
    elif det:
        cliOption = Detail(db, det)
    elif p_opt:
        printOption = p_opt
    # (A block of commented-out, dead expiry-cleanup code was removed here.)
    # NOTE(review): the original formatting was lost; this reconstruction
    # returns early after executing a CLI option, otherwise shows the plan —
    # confirm against the original file.
    if cliOption != None:
        if cliOption.check():
            cliOption.execute()
        db.conn.close()
        return
    Plan(db, pg, printOption).show()
    db.conn.close()
def read(self) -> list[Plan]:
    """Read all plans currently stored in the database.

    The file format is two lines per plan: the name, then the due date
    (any format arrow.get accepts).
    """
    lst: list[Plan] = []
    with open(self.path, 'r') as database:
        lines = database.readlines()
        # Divide lines into chunks of 2 (name line + due-date line).
        plans = [lines[x:x + 2] for x in range(0, len(lines), 2)]
        for name, due_date in plans:
            name, due_date = name.strip(), arrow.get(due_date.strip())
            lst.append(Plan(name, due_date))
    return lst
def add_major(db, dcode, mcode):
    """Insert a major row for (dcode, mcode) unless the mcode already exists.

    SECURITY FIX: the existence check previously interpolated plan.code into
    the SQL string with str.format, an SQL-injection vector; both statements
    now use parameterized queries.
    """
    plan = Plan(mcode)
    mycursor = db.cursor()
    query = "SELECT * FROM majors WHERE mcode = %s"
    mycursor.execute(query, (plan.code,))
    existingMajors = mycursor.fetchall()
    if (len(existingMajors) > 0):
        print("already exists")
        return
    sql = "INSERT INTO majors (dcode, mcode, type, name) VALUES (%s, %s, %s, %s)"
    val = (dcode, plan.code, plan.type, plan.title)
    print(val)
    mycursor.execute(sql, val)
    db.commit()
def plan_inc(queue, buf, amt, tag=None):
    """Build a Plan that adds constant `amt` to every element of `buf`
    in place. Only 1-D, zero-offset buffers are supported (asserted)."""
    # XXX: only copy the parts of the buffer that are part of the logical Array
    # XXX: use the elemwise kernel generator above
    config = {'buf_type': buf.ocldtype, 'amt': amt}
    _fn = cl.Program(queue.context, """
        __kernel void fn(__global %(buf_type)s *dst)
        {
            dst[get_global_id(0)] = dst[get_global_id(0)] + %(amt)s;
        }
        """ % config).build().fn
    assert buf.offset == 0
    L, = buf.shape
    _fn.set_args(buf.data,)
    return Plan(queue, _fn, (L,), None, name='inc', amt=amt, tag=tag)
def initialize_open(self, Q, R, P0, kmax):
    """Reset the search and seed the open set with the initial plan.

    :param Q: process-noise matrix for the motion model
    :param R: measurement-noise parameter passed to the Plan
    :param P0: initial state-covariance estimate
    :param kmax: planning horizon (max steps)
    """
    self.clear()
    self.plan_number = 0
    p_init = Plan(
        self.x_init,
        self.start_idx,
        self.graph.env,
        self.x_init.shape[0],
        R,
        kmax,
    )
    # Set variables in p_init
    p_init.set_motion(self.graph.planner.A, self.graph.planner.B, Q)
    p_init.set_gain(self.graph.planner.gain)
    p_init.set_init_est(P0)
    self.P_open.add((p_init, self.plan_number))
    self.P[0] = set()
    self.P[0].add(deepcopy(p_init))
    # the candidate set G starts as a copy of the open set
    self.G = self.P_open.copy()
def make_plan(designs, X_source_dist, X_target_dist):
    """Assemble a Plan from (name, design, estimator, kwargs) tuples.

    Every design's kwargs receive the source/target distributions, and a
    single ATEError evaluator is attached to the plan.
    """
    plan = Plan()
    for spec in designs:
        name, dgn, estr, design_kwargs = spec[0], spec[1], spec[2], spec[3]
        design_kwargs['source'] = X_source_dist
        design_kwargs['target'] = X_target_dist
        plan.add_design(name, dgn, estr, design_kwargs)
    plan.add_evaluator('ATEError', evalr.ATEError)
    return plan
def test_insertTime(self):
    """insertEndTime fills line 1 of the source text with the end time
    of the first entry; the other lines must stay untouched.

    NOTE(review): the fixture's internal line breaks were lost in the
    original formatting; a blank middle line is assumed so the index-2
    assertions below are reachable — confirm against the original file.
    """
    test_source = """00:01 hi wie gehts?

00:03 mir gehts gut!"""
    plan= Plan("heute")
    plan.add(Entry("00:02","hi wie gehts?",start="00:01"))
    plan.add(Entry("00:05","mir gehts gut!",start="00:03"))
    string = ParseText.insertEndTime( plan,test_source,1)
    string = string.split("\n")
    l = test_source.split("\n")
    # same number of lines; only line 1 was rewritten with the end time
    self.assertEqual(len(string),len(l))
    self.assertEqual(l[0],string[0])
    self.assertNotEqual(l[1],string[1])
    self.assertEqual(string[1].strip(),"00:03")
    self.assertEqual(l[2],string[2])
def combineEntriesAndStructure(theme: str, structure: list, entries: list):
    """Rebuild a Plan by zipping flat `entries` back into `structure`.

    A structure node with count == 1 contributes one entry directly; a node
    with count > 1 becomes a Routine wrapping that many consecutive entries.
    Nodes with count < 1 are skipped.
    """
    plan = Plan(theme)
    cursor = 0
    for node in structure:
        if node.count == 1:
            plan.add(entries[cursor])
            cursor += 1
        elif node.count > 1:
            routine = Routine(node.theme)
            routine.start = node.start
            span = node.count
            for offset in range(span):
                routine.add(entries[cursor + offset])
            cursor += span
            plan.add(routine)
    return plan
def test_agents_aware_of_peers():
    """After planning, each agent tracks bids/plans of its peer but never
    of itself."""
    print("testing test_agents_aware_of_peers...\n")
    environment = Environment(yaml_file="utils/simple.yaml")
    agent1 = Agent(mode = "normal", start_pos = (0, 0), goal_pos = (10.5, 5.5),
                   environment = environment, goal_dist = 0.3, rrt_iters = 200)
    agent2 = Agent(mode = "normal", start_pos = (2, 3), goal_pos = (0, 4),
                   environment = environment, goal_dist = 0.3, rrt_iters = 200)
    # nothing known about anyone before the Plan wires the agents together
    assert not agent1.bids.keys()
    assert not agent2.bids.keys()
    assert not agent1.other_agent_plans.keys()
    assert not agent2.other_agent_plans.keys()
    plan = Plan(agents = [agent1, agent2],
                env = environment,
                dma_indiv = sol_dma_individual,
                dma_coop = None,
                spin_rate = 10  # Hz
                )
    # each agent knows only its peer (keyed by antenna uuid), never itself
    assert agent1.antenna.uuid not in agent1.bids
    assert agent1.antenna.uuid not in agent1.other_agent_plans
    assert agent2.antenna.uuid in agent1.bids
    assert agent2.antenna.uuid in agent1.other_agent_plans
    assert agent1.antenna.uuid in agent2.bids
    assert agent1.antenna.uuid in agent2.other_agent_plans
    assert agent2.antenna.uuid not in agent2.bids
    assert agent2.antenna.uuid not in agent2.other_agent_plans
def __init__(self):
    """Load chanko configuration from ./config under the CWD and set up the
    plan plus local/remote caches.

    Raises Error if sources.list or trustedkeys.gpg is missing.
    """
    self.base = os.getcwd()
    self.config = os.path.join(self.base, 'config')
    self.architecture = getoutput("dpkg --print-architecture")
    self.trustedkeys = os.path.join(self.config, 'trustedkeys.gpg')
    self.sources_list = os.path.join(self.config, 'sources.list')
    # both files are mandatory for any cache operation
    for f in (self.sources_list, self.trustedkeys):
        if not os.path.exists(f):
            raise Error("required file not found: " + f)
    conf = ChankoConfig(os.path.join(self.config, 'chanko.conf'))
    os.environ['CCURL_CACHE'] = conf.ccurl_cache
    self.archives = os.path.join(self.base, 'archives')
    makedirs(os.path.join(self.archives, 'partial'))
    plan_path = os.path.join(self.base, 'plan')
    # re-split "-a-b"-style cpp options into ["-a", "-b"]
    plan_cpp = conf.plan_cpp.replace("-", " -").strip()
    plan_cpp = plan_cpp.split(" ") if plan_cpp else []
    self.plan = Plan(plan_path, self.architecture, plan_cpp)
    self.local_cache = LocalCache(self)
    self.remote_cache = RemoteCache(self)
def plan_parallel_ragged_gather_gemv2(queue, Ms, Ns, alpha, A, A_js, X, X_js,
                                      beta, Y,
                                      group_size = 32,
                                      Y_in=None,
                                      tag=None):
    """Build a Plan computing, per output vector yi,
    Y[yi] = beta[yi] * Y_in[yi] + alpha[yi] * sum_j dot(A[A_js[yi][j]], X[X_js[yi][j]])
    over ragged arrays, using one work-group per output with a local parallel
    reduction of the partial dot products.
    """
    # TODO: if alpha or beta is a float
    # then render it into the kernel text.
    # Scalars are broadcast to one coefficient per output vector.
    try:
        float(alpha)
        alpha = [alpha] * len(Y)
    except TypeError:
        pass
    try:
        float(beta)
        beta = [beta] * len(Y)
    except TypeError:
        pass
    cl_alpha = to_device(queue, np.asarray(alpha, Y.buf.dtype))
    cl_beta = to_device(queue, np.asarray(beta, Y.buf.dtype))
    if Y_in is None:
        Y_in = Y
    # XXX check for e.g. all Ns being the same thing
    #     especially all Ns == 1
    cl_Ns = to_device(queue, np.asarray(Ns, 'int32'))
    # XXX check that all the ints are ints not longs
    textconf = {
        'type_alpha': cl_alpha.ocldtype,
        'type_beta': cl_beta.ocldtype,
        'type_A': A.cl_buf.ocldtype,
        'type_X': X.cl_buf.ocldtype,
        'type_Y': Y.cl_buf.ocldtype,
        'y_len': len(Y),
        'lsize': group_size,
    }
    text = """
        __kernel void fn(
            const __global int *Ns,
            const __global ${type_alpha} * alphas,
            const __global int *A_starts,
            const __global ${type_A} *A_data,
            const __global int *A_js_starts,
            const __global int *A_js_lens,
            const __global int *A_js_data,
            const __global int *X_starts,
            const __global ${type_X} *X_data,
            const __global int *X_js_starts,
            const __global int *X_js_data,
            const __global ${type_beta} * betas,
            const __global int *Y_in_starts,
            const __global ${type_Y} *Y_in_data,
            const __global int *Y_starts,
            const __global int *Y_lens,
            __global ${type_Y} *Y_data)
        {
            //const int mm = get_global_id(1); //TODO
            __local ${type_Y} partialDotProduct[${lsize}]; //Scratch space for the dot products

            //Y is divided into groups of size group_size.
            //Each work-item does enough dot-products to cover one of the groups
            for (uint yi = get_group_id(0); yi < ${y_len}; yi += get_num_groups(0)) {
                const __global int* X_js_row = X_js_data + X_js_starts[yi];
                const __global int* A_js_row = A_js_data + A_js_starts[yi];

                const ${type_alpha} alpha = alphas[yi];
                const ${type_beta} beta = betas[yi];

                int y_offset = Y_starts[yi];
                int y_in_offset = Y_in_starts[yi];

                Y_data[y_offset] = beta * Y_in_data[y_in_offset];

                float sum = 0;
                int n_dot_products = A_js_lens[yi];
                //Do all of xjs dot products at same time
                for(int j = 0; j < n_dot_products; j++) {
                    int x_ji = X_js_row[j];
                    int a_ji = A_js_row[j];
                    int N_i = Ns[a_ji];

                    const __global ${type_A}* A_row = A_data + A_starts[a_ji]; //Get the rows for the product
                    const __global ${type_X}* X_row = X_data + X_starts[x_ji];

                    //Each work item will do some fraction of the multiplications and store the result locally
                    for (uint x = get_local_id(0); x < N_i; x += get_local_size(0)) {
                        sum += A_row[x] * X_row[x];
                    }
                }
                partialDotProduct[get_local_id(0)] = sum;

                //Parallel reduction of locally stored sums
                for (uint stride = 1; stride < get_local_size(0); stride *= 2) {
                    barrier(CLK_LOCAL_MEM_FENCE);

                    uint index = 2 * stride * get_local_id(0);
                    if (index < get_local_size(0)) {
                        partialDotProduct[index] += partialDotProduct[index + stride];
                    }
                }

                //Multiply by alpha and store the result.
                if (get_local_id(0) == 0) {
                    Y_data[yi] += alpha * partialDotProduct[0];
                    barrier(CLK_LOCAL_MEM_FENCE);
                }
            }
        }
        """
    text = Template(text, output_encoding='ascii').render(**textconf)
    # Make the global size the closest multiple of the group size (ceiling)
    y_size = int(math.ceil(len(Y) / float(group_size))) * group_size
    gsize = (y_size,)
    lsize = (group_size,)
    _fn = cl.Program(queue.context, text).build().fn
    full_args = (cl_Ns,
                 cl_alpha,
                 A.cl_starts,
                 A.cl_buf,
                 A_js.cl_starts,
                 A_js.cl_lens,
                 A_js.cl_buf,
                 X.cl_starts,
                 X.cl_buf,
                 X_js.cl_starts,
                 X_js.cl_buf,
                 cl_beta,
                 Y_in.cl_starts,
                 Y_in.cl_buf,
                 Y.cl_starts,
                 Y.cl_lens,
                 Y.cl_buf,
                 )
    _fn.set_args(*[arr.data for arr in full_args])
    rval = Plan(queue, _fn, gsize, lsize,
                name='ref_parallel_ragged_gather_gemv',
                tag=tag,
                )
    # prevent garbage-collection of the device buffers
    rval.alpha = cl_alpha
    rval.beta = cl_beta
    rval.Ns = cl_Ns
    return rval
# Use this file to easily define all of your cron jobs. # # It's helpful to understand cron before proceeding. # http://en.wikipedia.org/wiki/Cron # # Learn more: http://github.com/fengsp/plan import os from os.path import join as pjoin from plan import Plan dir_path = os.path.dirname(os.path.realpath(__file__)) cron = Plan( "scripts", path=pjoin(dir_path, '../scrape'), environment={'DJANGO_SETTINGS_MODULE': 'scrape.settings_production'} ) # register one command, script or module # cron.command('command', every='1.day') # cron.script('script.py', path='/web/yourproject/scripts', every='1.month') # cron.module('calendar', every='feburary', at='day.3') cron.command('cd %s && DJANGO_SETTINGS_MODULE=scrape.settings_production $HOME/venv/bin/scrapy crawl eoaient' % (pjoin(dir_path, '../scrape/crawler')), every='2.day', at='minute.48') cron.command('cd %s && DJANGO_SETTINGS_MODULE=scrape.settings_production $HOME/venv/bin/scrapy crawl ck0tp' % (pjoin(dir_path, '../scrape/crawler')), every='3.day', at='minute.12') cron.script('manage.py extoon_info', every='5.hour', at='minute.30') cron.script('manage.py extoon_description', every='6.hour', at='minute.15')
# NOTE(review): this excerpt appears to come from inside a
# `for opt, val in opts:` option-parsing loop whose header (and the
# enclosing function) are outside this view — confirm indentation against
# the original file.
if opt == '-h':
    usage()
if opt in ('-o', '--output'):
    output_path = val
if opt in ('-p', '--pool'):
    pool_path = val
if opt == "--bootstrap":
    if not os.path.isdir(val):
        fatal("directory does not exist (%s)" % val)
    bootstrap_path = val

# Seed the plan from the pool, union in bootstrap packages and any plan
# files given as arguments, recording each package's origin as we go.
plan = Plan(pool_path=pool_path)
if bootstrap_path:
    bootstrap_packages = set(iter_packages(bootstrap_path))
    plan |= bootstrap_packages
    for package in bootstrap_packages:
        plan.packageorigins.add(package, 'bootstrap')
for arg in args:
    if arg == "-" or os.path.exists(arg):
        subplan = Plan.init_from_file(arg, cpp_opts, pool_path)
        plan |= subplan
        for package in subplan:
            plan.packageorigins.add(package, arg)
# NOTE(review): Python 2 code (`except X, e` syntax, print statement);
# the opening `try:` and the enclosing function header are outside this
# view — this fragment is reproduced as-is.
    opts, args = gnu_getopt(args, 'p:', ['pool='])
except getopt.GetoptError, e:
    usage(e)

if not args:
    usage()

# -p/--pool overrides the FAB_POOL_PATH environment fallback
pool_path = None
for opt, val in opts:
    if opt in ('-p', '--pool'):
        pool_path = val
if pool_path is None:
    pool_path = os.environ.get('FAB_POOL_PATH')

# Arguments are either plan files ("-" = stdin) or literal package names.
plan = Plan(pool_path=pool_path)
for arg in args:
    if arg == "-" or exists(arg):
        plan |= Plan.init_from_file(arg, cpp_opts, pool_path)
    else:
        plan.add(arg)

dctrls = plan.dctrls()
print generate_index(dctrls)

if __name__=="__main__":
    main()
# Crontab definition for the Lyket ingestion job
# (plan library: http://github.com/fengsp/plan).

from plan import Plan
from os import getcwd

# Scripts are resolved relative to the parent of the current working
# directory. NOTE(review): this depends on where the file is launched from;
# a path derived from __file__ would be more robust — kept for compatibility.
WORKING_DIR = getcwd() + '/../'

cron = Plan("lyket_ingestion_cron")
cron.script('LyketJob.py', every='5.minutes', path=WORKING_DIR)

if __name__ == '__main__':
    # Prefer updating an already-installed crontab block; fall back to
    # writing a fresh one when the update fails (e.g. nothing installed yet).
    try:
        cron.run('update')
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        cron.run('write')
# -*- coding: utf-8 -*-
"""
    demo
    ~~~~

    Example crontab built with the plan library.

    :copyright: (c) 2014 by Shipeng Feng.
    :license: BSD, see LICENSE for more details.
"""

from plan import Plan

cron = Plan()

# (command, schedule kwargs) pairs, registered in order.
_JOBS = (
    ('ls /tmp', dict(every='1.day', at='12:00')),
    ('pwd', dict(every='2.month')),
    ('date', dict(every='weekend')),
)

for _command, _schedule in _JOBS:
    cron.command(_command, **_schedule)

if __name__ == "__main__":
    cron.run()
def attending_plan(self):
    """Return (and memoize) the plan in which this person attends.

    When this person attends, the cached plan is built by merging each
    direct report's not-attending plan into a Plan rooted at self.
    """
    cached = self.attending_plan_cache
    if not cached:
        cached = Plan(self, True)
        for report in self.children:
            cached.merge(report.not_attending_plan())
        self.attending_plan_cache = cached
    return cached
# NOTE(review): mutable default arguments (inputs={}, ...) are an
# anti-pattern but are only read here, never mutated — TODO confirm callers.
# Python 2 code (`xrange`, dict.items() concatenation, print statement).
def _plan_template(queue, name, core_text, declares="", tag=None, n_elements=0, inputs={}, outputs={}, parameters={}):
    """Template for making a plan for vector nonlinearities.

    This template assumes that all inputs and outputs are vectors.

    Parameters
    ----------
    n_elements: int
        If n_elements == 0, then the kernels are allocated as a block. This
        is simple, but can be slow for large computations where input vector
        sizes are not uniform (e.g. one large population and many small
        ones).

        If n_elements >= 1, then all the vectors in the RaggedArray are
        flattened so that the exact number of required kernels is allocated.
        Each kernel performs computations for `n_elements` elements.

    inputs: dictionary of CLRaggedArrays
        Inputs to the function. RaggedArrays must be a list of vectors.

    outputs: dictionary of CLRaggedArrays
        Outputs of the function. RaggedArrays must be a list of vectors.

    parameters: dictionary of CLRaggedArrays
        Parameters to the function. Each RaggedArray element must be a
        vector of the same length of the inputs, or a scalar (to be
        broadcasted). Providing a float instead of a RaggedArray makes that
        parameter constant.
    """
    base = inputs.values()[0]  # input to use as reference (for lengths)
    N = len(base)

    ### split parameters into static and updated params
    static_params = {}  # static params (hard-coded)
    params = {}  # variable params (updated)
    for k, v in parameters.items():
        if isinstance(v, CLRaggedArray):
            params[k] = v
        else:
            # Non-RaggedArray parameters are baked into the kernel as
            # float constants; anything non-convertible is a caller error.
            try:
                static_params[k] = ("float", float(v))
            except TypeError:
                raise

    # avars maps each argument name -> (OpenCL dtype, start-offset expr)
    # used when rendering the kernel template below.
    avars = {}
    for vname, v in inputs.items() + outputs.items():
        assert vname not in avars, "Name clash"
        assert len(v) == N
        assert all_equal(v.shape0s, base.shape0s)
        ### N.B. - we should be able to ignore ldas as long as all vectors
        assert all_equal(v.shape1s, 1)

        dtype = v.cl_buf.ocldtype
        offset = "%(name)s_starts[n]" % {"name": vname}
        avars[vname] = (dtype, offset)

    for vname, v in params.items():
        assert vname not in avars, "Name clash"
        assert len(v) == N
        for i in xrange(N):
            # Each parameter vector is either full-length (per-element) or
            # length 1 (broadcast scalar).
            assert v.shape0s[i] == base.shape0s[i] or v.shape0s[i] == 1, "%s.shape0s[%d] must be 1 or %d (not %d)" % (
                vname,
                i,
                base.shape0s[i],
                v.shape0s[i],
            )
            assert v.shape1s[i] == 1
        dtype = v.cl_buf.ocldtype
        offset = "%(name)s_starts[n]" % {"name": vname}
        avars[vname] = (dtype, offset)

    ivars = dict((k, avars[k]) for k in inputs.keys())
    ovars = dict((k, avars[k]) for k in outputs.keys())
    pvars = dict((k, avars[k]) for k in params.keys())

    textconf = dict(
        N=N,
        n_elements=n_elements,
        tag=str(tag),
        declares=declares,
        core_text=core_text,
        ivars=ivars,
        ovars=ovars,
        pvars=pvars,
        static_params=static_params,
    )

    if n_elements > 0:
        ### Allocate the exact number of required kernels in a vector
        gsize = (int(np.ceil(np.sum(base.shape0s) / float(n_elements))),)
        # Mako template: each work item walks `lengths` to find which ragged
        # row its flat index `gid * n_elements` falls into, then processes up
        # to n_elements consecutive elements, advancing across row
        # boundaries as needed.
        text = """
////////// MAIN FUNCTION //////////
__kernel void fn(
% for name, [type, offset] in ivars.items():
    __global const int *${name}_starts,
    __global const ${type} *in_${name},
% endfor
% for name, [type, offset] in ovars.items():
    __global const int *${name}_starts,
    __global ${type} *in_${name},
% endfor
% for name, [type, offset] in pvars.items():
    __global const int *${name}_starts,
    __global const int *${name}_shape0s,
    __global const ${type} *in_${name},
% endfor
    __global const int *lengths
)
{
    const int gid = get_global_id(0);
    int m = gid * ${n_elements}, n = 0;
    while (m >= lengths[n]) {
        m -= lengths[n];
        n++;
    }
    if (n >= ${N}) return;

% for name, [type, offset] in ivars.items():
    __global const ${type} *cur_${name} = in_${name} + ${offset} + m;
% endfor
% for name, [type, offset] in ovars.items():
    __global ${type} *cur_${name} = in_${name} + ${offset} + m;
% endfor
% for name, [type, offset] in pvars.items():
    __global const ${type} *cur_${name} = in_${name} + ${offset};
    int ${name}_isvector = ${name}_shape0s[n] > 1;
    if (${name}_isvector) cur_${name} += m;
% endfor
% for name, [type, offset] in ivars.items() + ovars.items() + pvars.items():
    ${type} ${name};
% endfor
% for name, [type, value] in static_params.items():
    const ${type} ${name} = ${value};
% endfor
    //////////////////////////////////////////////////
    //vvvvv USER DECLARATIONS BELOW vvvvv
    ${declares}
    //^^^^^ USER DECLARATIONS ABOVE ^^^^^
    //////////////////////////////////////////////////

% for ii in range(n_elements):
    //////////////////////////////////////////////////
    ////////// LOOP ITERATION ${ii}
% for name, [type, offset] in ivars.items():
    ${name} = *cur_${name};
% endfor
% for name, [type, offset] in pvars.items():
    if ((${ii} == 0) || ${name}_isvector) ${name} = *cur_${name};
% endfor

    /////vvvvv USER COMPUTATIONS BELOW vvvvv
    ${core_text}
    /////^^^^^ USER COMPUTATIONS ABOVE ^^^^^

% for name, [type, offset] in ovars.items():
    *cur_${name} = ${name};
% endfor
% if ii + 1 < n_elements:
    m++;
    if (m >= lengths[n]) {
        n++;
        m = 0;
        if (n >= ${N}) return;
% for name, [type, offset] in ivars.items() + ovars.items() + pvars.items():
        cur_${name} = in_${name} + ${offset};
% endfor
% for name, [type, offset] in pvars.items():
        ${name}_isvector = ${name}_shape0s[n] > 1;
        if (!${name}_isvector) ${name} = *cur_${name};
% endfor
    } else {
% for name, [type, offset] in ivars.items() + ovars.items():
        cur_${name}++;
% endfor
% for name, [type, offset] in pvars.items():
        if (${name}_isvector) cur_${name}++;
% endfor
    }
% endif
% endfor
}
"""
    else:
        ### Allocate more than enough kernels in a matrix
        gsize = (int(np.max(base.shape0s)), int(N))
        # Mako template: one work item per (element, row) pair; items past a
        # row's length simply return.
        text = """
////////// MAIN FUNCTION //////////
__kernel void fn(
% for name, [type, offset] in ivars.items():
    __global const int *${name}_starts,
    __global const ${type} *in_${name},
% endfor
% for name, [type, offset] in ovars.items():
    __global const int *${name}_starts,
    __global ${type} *in_${name},
% endfor
% for name, [type, offset] in pvars.items():
    __global const int *${name}_starts,
    __global const int *${name}_shape0s,
    __global const ${type} *in_${name},
% endfor
    __global const int *lengths
)
{
    const int m = get_global_id(0);
    const int n = get_global_id(1);
    const int M = lengths[n];
    if (m >= M) return;

% for name, [type, offset] in ivars.items():
    ${type} ${name} = in_${name}[${offset} + m];
% endfor
% for name, [type, offset] in ovars.items():
    ${type} ${name};
% endfor
% for name, [type, offset] in pvars.items():
    const ${type} ${name} = (${name}_shape0s[n] > 1) ? in_${name}[${offset} + m] : in_${name}[${offset}];
% endfor
% for name, [type, value] in static_params.items():
    const ${type} ${name} = ${value};
% endfor
    //////////////////////////////////////////////////
    //vvvvv USER DECLARATIONS BELOW vvvvv
    ${declares}
    //^^^^^ USER DECLARATIONS ABOVE ^^^^^
    //////////////////////////////////////////////////

    /////vvvvv USER COMPUTATIONS BELOW vvvvv
    ${core_text}
    /////^^^^^ USER COMPUTATIONS ABOVE ^^^^^

% for name, [type, offset] in ovars.items():
    in_${name}[${offset} + m] = ${name};
% endfor
}
"""

    text = Template(text, output_encoding="ascii").render(**textconf)
    # Debug aid: dump the rendered kernel with line numbers.
    if 0:
        for i, line in enumerate(text.split("\n")):
            print "%3d %s" % (i + 1, line)

    # Argument order must match the kernel signature rendered above:
    # inputs, outputs (starts + buf each), params (starts + shape0s + buf),
    # then the shared lengths array.
    full_args = []
    for name, v in inputs.items() + outputs.items():
        full_args.extend([v.cl_starts, v.cl_buf])
    for name, v in params.items():
        full_args.extend([v.cl_starts, v.cl_shape0s, v.cl_buf])
    full_args.append(base.cl_shape0s)
    full_args = tuple(full_args)

    _fn = cl.Program(queue.context, text).build().fn
    _fn.set_args(*[arr.data for arr in full_args])

    rval = Plan(queue, _fn, gsize, lsize=None, name=name, tag=tag)
    rval.full_args = full_args  # prevent garbage-collection
    return rval
# NOTE(review): fragment — the enclosing "def crontab(j):" lies outside this
# chunk; indentation below reconstructs the original nesting. Python 2 code
# (`has_key`, `except ..., e`).
    # Normalise missing schedule fields to empty strings
    # (old `and/or` conditional idiom kept as-is).
    j['every'] = j.has_key('every') and j['every'] or ''
    j['at'] = j.has_key('at') and j['at'] or ''
    jobname ='job%s-%s'%(str(j['id']),j['every'])
    logsdir ='%s/Logs/cronlogs/%s'%(os.getcwd(),jobname)
    # Best-effort mkdir: the directory may already exist.
    try:
        os.makedirs(logsdir)
    except Exception,e:
        pass
    # Prefix with PATH=$PATH so the command inherits the caller's PATH.
    exec_command='PATH=$PATH && %s'%j['cmd']
    output = dict(stdout='%s/%s.stdout.log'%(logsdir,jobname),
                  stderr='%s/%s.stderr.log'%(logsdir,jobname)
                  )
    cron = Plan(name='job'+str(j['id']))
    # Only pass `at` when the job specified one.
    if j.has_key('at') :
        r=cron.command(exec_command,every=j['every'],at=j['at'],output=output,)
    else:
        r=cron.command(exec_command,every=j['every'],output=output,)
    # j['action'] is a plan run mode such as 'update' or 'write'.
    cron.run(j['action'])
    return True

if __name__ == '__main__':
    #job1={'id':1,'cmd':'date','every':'2.day','action':'update','at':'hour.12 minute.15 minute.45'}
    job2={'id':2,'cmd':'/home/qilong/python/cron/1.sh','every':'1.hour','action':'update','at':'minute.10'}
    crontab(job2)
    #print job2
    #job2=sys.argv[1]
def plan_ragged_gather_gemv(queue, Ms, Ns, alpha, A, A_js, X, X_js, beta, Y, Y_in=None, tag=None):
    """Build an OpenCL Plan computing Y = beta * Y_in + alpha * gathered gemv.

    For each output row bb, sums dot products of the X vectors selected by
    X_js[bb] with the A matrices selected by A_js[bb].
    NOTE(review): A, X, Y are ragged CL arrays and A_js/X_js are ragged index
    arrays — exact types come from the surrounding project; confirm there.
    """
    # TODO: if alpha or beta is a float
    # then render it into the kernel text.
    # Scalars are broadcast to one coefficient per output row; ragged/list
    # inputs raise TypeError from float() and pass through unchanged (EAFP).
    try:
        float(alpha)
        alpha = [alpha] * len(Y)
    except TypeError:
        pass

    try:
        float(beta)
        beta = [beta] * len(Y)
    except TypeError:
        pass

    cl_alpha = to_device(queue, np.asarray(alpha, Y.buf.dtype))
    cl_beta = to_device(queue, np.asarray(beta, Y.buf.dtype))

    # Default: accumulate in place on Y itself.
    if Y_in is None:
        Y_in = Y

    # XXX check for e.g. all Ns being the same thing
    # especially all Ns == 1
    cl_Ns = to_device(queue, np.asarray(Ns, 'int32'))

    # XXX check that all the ints are ints not longs
    # OpenCL dtypes substituted into the kernel template below.
    textconf = {
        'type_alpha': cl_alpha.ocldtype,
        'type_beta': cl_beta.ocldtype,
        'type_A': A.cl_buf.ocldtype,
        'type_X': X.cl_buf.ocldtype,
        'type_Y': Y.cl_buf.ocldtype,
    }

    # One work item per (row mm, output bb); each accumulates its own output
    # element over the gathered dot products.
    text = """
        __kernel void fn(
            __global int *Ns,
            __global ${type_alpha} * alphas,
            __global int *A_starts,
            __global ${type_A} *A_data,
            __global int *A_js_starts,
            __global int *A_js_lens,
            __global int *A_js_data,
            __global int *X_starts,
            __global ${type_X} *X_data,
            __global int *X_js_starts,
            __global int *X_js_data,
            __global ${type_beta} * betas,
            __global int *Y_in_starts,
            __global ${type_Y} *Y_in_data,
            __global int *Y_starts,
            __global int *Y_lens,
            __global ${type_Y} *Y_data)
        {
            const int mm = get_global_id(0);
            const int bb = get_global_id(1);
            const int M = Y_lens[bb];
            if (mm < M)
            {
                const ${type_alpha} alpha = alphas[bb];
                const ${type_beta} beta = betas[bb];

                int n_dot_products = A_js_lens[bb];
                int y_offset = Y_starts[bb];
                int y_in_offset = Y_in_starts[bb];

                X_js_data += X_js_starts[bb];
                A_js_data += A_js_starts[bb];

                Y_data[y_offset + mm] = beta * Y_in_data[y_in_offset + mm];

                for (int ii = 0; ii < n_dot_products; ++ii)
                {
                    int x_ji = X_js_data[ii];
                    int a_ji = A_js_data[ii];
                    int N_i = Ns[a_ji];
                    int x_offset = X_starts[x_ji];
                    int a_offset = A_starts[a_ji];

                    // compute the matrix-vector product
                    // dot(X[x_ji], A[a_ji])
                    ${type_Y} y_sum = 0;
                    for (int nn = 0; nn < N_i; ++nn)
                    //Parallel reduction. How big is N_i?
                    {
                        y_sum += X_data[x_offset + nn] * A_data[a_offset + nn * M + mm];
                    }
                    Y_data[y_offset + mm] += alpha * y_sum;
                }
            }
        }
    """

    text = Template(text, output_encoding='ascii').render(**textconf)

    # Grid: rows x outputs; local size left to the runtime.
    gsize = (int(max(Ms)), int(len(Y)),)
    lsize = None
    _fn = cl.Program(queue.context, text).build().fn
    # Order must match the kernel signature above.
    full_args = (cl_Ns,
                 cl_alpha,
                 A.cl_starts,
                 A.cl_buf,
                 A_js.cl_starts,
                 A_js.cl_lens,
                 A_js.cl_buf,
                 X.cl_starts,
                 X.cl_buf,
                 X_js.cl_starts,
                 X_js.cl_buf,
                 cl_beta,
                 Y_in.cl_starts,
                 Y_in.cl_buf,
                 Y.cl_starts,
                 Y.cl_lens,
                 Y.cl_buf,
                 )

    #print [str(arr.dtype)[0] for arr in full_args]
    _fn.set_args(*[arr.data for arr in full_args])

    rval = Plan(queue, _fn, gsize, lsize,
                name='ref_ragged_gather_gemv',
                tag=tag,
                )
    # prevent garbage-collection
    rval.alpha = cl_alpha
    rval.beta = cl_beta
    rval.Ns = cl_Ns
    return rval
def not_attending_plan(self):
    """Return (and memoize) the plan in which this person does not attend.

    With this person absent, each direct report is free to choose, so the
    cached plan merges every child's best plan into a Plan rooted at self.
    """
    if self.not_attending_plan_cache:
        return self.not_attending_plan_cache
    plan = Plan(self, False)
    for report in self.children:
        plan.merge(report.best_plan())
    self.not_attending_plan_cache = plan
    return plan
def plan_probes(queue, periods, X, Y, tag=None):
    """
    Parameters
    ----------
    P : raggedarray of ints
        The period (in time-steps) of each probe

    Notes
    -----
    Builds an OpenCL Plan that, each step, copies probe n's signal X[n] into
    the next free row of its buffer Y[n] whenever that probe's countdown hits
    zero, then resets the countdown to periods[n] - 1.
    """
    assert len(X) == len(Y)
    assert len(X) == len(periods)
    N = len(X)

    # Per-probe mutable device state: steps until next sample, and the next
    # write row in each probe's buffer.
    cl_countdowns = to_device(queue, np.zeros(N, dtype="int32"))
    cl_bufpositions = to_device(queue, np.zeros(N, dtype="int32"))
    cl_periods = to_device(queue, np.asarray(periods, dtype="int32"))

    assert X.cl_buf.ocldtype == Y.cl_buf.ocldtype

    ### N.B.  X[i].shape = (ndims[i], )
    ###       Y[i].shape = (buf_ndims[i], buf_len)

    for i in xrange(N):
        assert X.shape0s[i] == Y.shape1s[i]
        assert X.shape1s[i] == 1
        assert X.stride0s[i] == 1
        assert Y.stride1s[i] == 1

    # One work-group per probe (second grid axis); the first axis strides
    # over the probe's dimensions.
    text = """
        ////////// MAIN FUNCTION //////////
        __kernel void fn(
            __global int *countdowns,
            __global int *bufpositions,
            __global const int *periods,
            __global const int *Xstarts,
            __global const int *Xshape0s,
            __global const ${Xtype} *Xdata,
            __global const int *Ystarts,
            __global ${Ytype} *Ydata
        )
        {
            const int n = get_global_id(1);
            const int countdown = countdowns[n];

            if (countdown == 0) {
                const int n_dims = Xshape0s[n];
                __global const ${Xtype} *x = Xdata + Xstarts[n];
                const int bufpos = bufpositions[n];

                __global ${Ytype} *y = Ydata + Ystarts[n] + bufpos * n_dims;

                for (int ii = get_global_id(0);
                         ii < n_dims;
                         ii += get_global_size(0))
                {
                    y[ii] = x[ii];
                }
                // This should *not* cause deadlock because
                // all local threads guaranteed to be
                // in this branch together.
                barrier(CLK_LOCAL_MEM_FENCE);
                if (get_global_id(0) == 0)
                {
                    countdowns[n] = periods[n] - 1;
                    bufpositions[n] = bufpos + 1;
                }
            }
            else
            {
                barrier(CLK_LOCAL_MEM_FENCE);
                if (get_global_id(0) == 0)
                {
                    countdowns[n] = countdown - 1;
                }
            }
        }
    """

    textconf = dict(N=N, Xtype=X.cl_buf.ocldtype, Ytype=Y.cl_buf.ocldtype)
    text = Template(text, output_encoding="ascii").render(**textconf)

    # Order must match the kernel signature above.
    full_args = (cl_countdowns, cl_bufpositions, cl_periods, X.cl_starts, X.cl_shape0s, X.cl_buf, Y.cl_starts, Y.cl_buf)
    _fn = cl.Program(queue.context, text).build().fn
    _fn.set_args(*[arr.data for arr in full_args])

    # Work-group width: the longest probe vector, capped by the device limit.
    max_len = min(queue.device.max_work_group_size, max(X.shape0s))
    gsize = (max_len, N)
    lsize = (max_len, 1)
    rval = Plan(queue, _fn, gsize, lsize=lsize, name="cl_probes", tag=tag)
    rval.full_args = full_args  # prevent garbage-collection
    rval.cl_bufpositions = cl_bufpositions
    rval.Y = Y
    return rval
# -*- coding: utf-8 -*-
# Crontab definitions built with the plan library.
#
# cron primer:  http://en.wikipedia.org/wiki/Cron
# plan library: http://github.com/fengsp/plan

from plan import Plan

cron = Plan("commands")

# Capture `top` output every 4 hours, splitting stdout and stderr logs.
_top_logs = {'stdout': '/tmp/top_stdout.log', 'stderr': '/tmp/top_stderr.log'}
cron.command('top', every='4.hour', output=_top_logs)

# Placeholder job: Sundays at 12:00 and 12:30.
cron.command('yourcommand', every='sunday', at='hour.12 minute.0 minute.30')

# more commands here

if __name__ == "__main__":
    cron.run()
# -*- coding: utf-8 -*-
# Crontab definitions built with the plan library.
#
# cron primer:  http://en.wikipedia.org/wiki/Cron
# plan library: http://github.com/fengsp/plan

from plan import Plan

_SCRIPTS_PATH = '/web/yourproject/scripts'
_ENVIRONMENT = {'YOURAPP_ENV': 'production'}

cron = Plan("scripts", path=_SCRIPTS_PATH, environment=_ENVIRONMENT)

# Daily script.
cron.script('script.py', every='1.day')
# Monthly script at 12:00.
cron.script('script_2.py', every='1.month', at='hour.12 minute.0')

# more scripts here

if __name__ == "__main__":
    cron.run()
def repairCallback(self, data):
    """Receives a message on the repair topic.

    Dispatches on data.type: repairRequest / repairResponse / repairDone /
    targetFound / planSyncRequest / planSyncResponse. Ignores our own
    messages and everything once the agent is DEAD.

    Fix: in the TRACKING branch, the pruning condition was
    `"executed" not in a or not "executed" not in a`, i.e. `X or not X`,
    which is always true and removed *every* action from the local plan.
    Rewritten to drop only actions that were never (or not fully) executed.
    """
    if self.state == State.DEAD:
        self.repair_sub.unregister()
        return

    type = data.type
    time = data.time
    sender = data.sender
    msg = data.data

    # Ignore our own messages echoed back on the topic.
    if self.agent == sender:
        return
    if self.state == State.DEAD:
        return

    # NOTE(review): the "targetFound" branch below re-acquires self.mutex
    # inside this `with`; that only works if mutex is reentrant (RLock) —
    # confirm against the constructor. Structure kept as in the original.
    with self.mutex:
        if type == "repairRequest":
            if self.state == State.REPAIRINGACTIVE:
                # Another robot is trying to repair. Abort the repair for
                # one of them; the lower agent name yields.
                if self.agent < sender:
                    logger.warning("%s is also trying to repair. He has priority. Canceling my repair" % sender)
                    pass  # cancel my reparation
                else:
                    logger.warning("%s is also trying to repair. I have priority. Ignoring its message" % sender)
                    return
            elif self.state == State.TRACKING:
                # Answer with our local plan, stripped of actions that were
                # not executed (see docstring for the bug fixed here).
                p = self.plan.getLocalJsonPlan(self.agent)
                for k in list(p["actions"].keys()):
                    if "executed" not in p["actions"][k] or not p["actions"][k]["executed"]:
                        Plan.removeAction(p, k)
                p["state"] = "tracking"
                self.sendNewStatusMessage("repairResponse", json.dumps(p))
                return
            elif self.state not in [State.RUNNING, State.TRACKINGCONFIRMATION, State.DONE]:
                logger.error("Received a repair request not when running. Ignoring it")
                return

            logger.info("Received a repair request. Pausing the execution")
            self.state = State.REPAIRINGPASSIVE
            self.sendVisuUpdate()
            self.sendNewStatusMessage("repairResponse", json.dumps(self.plan.getLocalJsonPlan(self.agent)))
        elif type == "repairResponse":
            try:
                plan = json.loads(msg)
            except TypeError:
                logger.error("Receive a repair message with msg not a json string : %s" % msg)
                return

            if "repairResponse" not in dir(self):
                return  # I'm not currently repairing

            # Keep only the first response from each peer.
            if sender not in self.repairResponse:
                self.repairResponse[sender] = plan
                logger.info("Receive a repair response from %s " % sender)
            else:
                logger.error("Received several response from %s. Keeping only the first one" % sender)
        elif type == "repairDone":
            logger.info("Receiving a new plan to execute from %s" % sender)
            planStr = msg

            if self.state in [State.REPAIRINGPASSIVE, State.TRACKING]:
                self.init(planStr, self.agent)
            else:
                logger.warning("I'm not is the right state but I received a new plan. Ignoring it, will sync later if needed")

            if self.state == State.REPAIRINGPASSIVE:
                self.state = State.RUNNING
            self.sendVisuUpdate()
        elif type == "targetFound":
            with self.mutex:
                self.targetFound(json.loads(msg), selfDetection = False)
        elif type == "planSyncRequest":
            self.receivePlanSyncRequest(sender)
        elif type == "planSyncResponse":
            msg = json.loads(msg)
            if "plan" not in msg:
                logger.error("Received an ill-formated planSyncResponse : %s" % msg)
            otherPlan = msg["plan"]
            self.receivePlanSyncResponse(sender, otherPlan)
        else:
            logger.warning("Received unsupported message of type %s from %s : %s" % (type, sender, msg))
# NOTE(review): fragment — the enclosing "def setup_logging():" header lies
# outside this chunk, and the final `elif args["start"]:` branch is truncated
# by the chunk boundary. Indentation reconstructs the original nesting.
    # INFO and above to stderr; the root logger still records DEBUG so other
    # handlers can capture more detail.
    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setFormatter(logging.Formatter())
    console_handler.setLevel(logging.INFO)
    root_logger = logging.getLogger()
    root_logger.addHandler(console_handler)
    root_logger.setLevel(logging.DEBUG)

if __name__ == "__main__":
    setup_logging()
    # docopt parses the module docstring into a command/argument dict.
    args = docopt(__doc__)
    if args["load"]:
        with open(args["<filename>"], 'r') as f:
            pprint.pprint(load(f))
    elif args["parse"]:
        plan = Plan.from_file(args["<filename>"])
        print plan
    elif args["process"]:
        assembly = Assembly.from_plan(args["<filename>"])
        print assembly
    elif args["generate"]:
        assembly = Assembly.from_plan(args["<filename>"])
        assembly.generate_files(os.path.split(args["<filename>"])[0], args["<output_folder>"])
    elif args["generun"]:
        # Generate, then run the assembly from inside the generated folder.
        assembly = Assembly.from_plan(args["<filename>"])
        assembly.generate_files(os.path.split(args["<filename>"])[0], args["<output_folder>"])
        os.chdir(os.path.join(args["<output_folder>"], assembly.plan.name))
        client = Client(docker_url())
        assembly.run(client)
    elif args["start"]:
        # (truncated in this chunk)
def receivePlanSyncResponse(self, sender, otherPlan):
    """callback for the plan synchronisation

    Compares our current plan ID against the one *sender* is executing and
    either: ignores the message (same ID / already have a newer import
    pending), imports the other's repaired plan (merging in our local view
    and dropping conflicting communications), keeps ours (other is an
    ancestor), or triggers a repair (diverged branches).
    """
    with self.mutex:
        myID = self.plan.ids[-1]
        otherID = otherPlan["ID"]["value"]

        # Same plan: nothing to do.
        if myID == otherID:
            return

        logger.info("I'm executing plan %s. %s is executing %s" % (self.plan.ids, sender, otherPlan["ID"]))

        # A plan import is already pending: only log how this one relates.
        if self.newPlanToImport is not None:
            newID = json.loads(self.newPlanToImport)["ID"]["value"]
            if newID != otherID:
                logger.warning("I'm executing %s. I received a new plan %s and a previous plan %s. Ignoring this one" % (myID,otherID,newID))
            else:
                logger.info("Ignoring this plan since I have already ask for its use")
            return

        if myID in otherPlan["ID"]["parents"]:
            # The other plan descends from ours: they repaired and we missed
            # the notification, so adopt their plan.
            logger.info("The other has repaired and I was not notified. I need to update my plan")

            agents = set([a["agent"] for a in otherPlan["actions"].values() if "agent" in a])
            logger.info("List of agents in this plan : %s " % agents)
            plansDict = {}

            #Prevent the removal of coms time
            otherPlan["current-time"] = (time.time() - self.beginDate)

            p = Plan(json.dumps(otherPlan), self.agent)

            #Check that all my communications are still in the plan
            droppedComs = set()        #In my current plan
            foreignDroppedCom = set()  #In the remote plan
            for k,a in self.plan.actions.items():
                if a["agent"] == self.agent:
                    if k in p.actions and a["name"] == p.actions[k]["name"]:
                        continue  #Ok, my action is still here
                    elif k not in p.actions and "communicate" in a["name"]:
                        #self.dropCommunication(a["name"])
                        droppedComs.add(a["name"])
                    else:
                        logger.error("My action %s (%s) is not in the new plan" % (a["name"],k))

            # Reverse check: actions the remote plan assigns to us that we
            # do not have locally.
            for k,a in p.actions.items():
                if a["agent"] == self.agent:
                    if k in self.plan.actions and a["name"] == self.plan.actions[k]["name"]:
                        continue  #Ok, my action is still here
                    if "communicate-meta" in a["name"]:
                        comName = a["name"]
                        # NOTE(review): `or a["name"]` is always truthy for a
                        # non-empty name, so the else branch below looks
                        # unreachable — possibly a debug leftover; kept as-is.
                        if comName in self.droppedComs or a["name"]:
                            #I already dropped this com.
                            logger.warning("They kept a com that I dropped %s (%s)" % (a["name"],k))
                            foreignDroppedCom.add(k)
                        else:
                            logger.error("They added an com action for me %s (%s)" % (a["name"],k))
                            logger.error("%s not in %s" % (comName, self.droppedComs))
                    else:
                        logger.error("They added an action for me %s (%s)" % (a["name"],k))
                        for k1,a1 in self.plan.actions.items():
                            if a1["name"] == a["name"]:
                                logger.error("I have this action with key %s" % k1)

            # Com metas present locally but absent remotely must be dropped
            # on our side too.
            for k,a in self.plan.actions.items():
                if "communicate-meta" in a["name"] and k not in p.actions:
                    droppedComs.add(a["name"])

            for c in droppedComs:
                self.dropCommunication(c)

            if foreignDroppedCom:
                logger.info("Imported plan before removing a foreign com: %s" % json.dumps(otherPlan))
                for c in foreignDroppedCom:
                    logger.info("Removing action %s (%s)" % (otherPlan["actions"][c], c))
                    logger.info("Length before %s" % len(otherPlan["actions"]))
                    otherPlan = Plan.removeAction(otherPlan, c)  #remove the com meta for actions that I dropped
                    logger.info("Length after %s" % len(otherPlan["actions"]))
                logger.info("Imported plan after removing a foreign com: %s" % json.dumps(otherPlan))

            # Merge the remote view of every other agent with our own local
            # plan, stamped with our current execution time.
            p = Plan(json.dumps(otherPlan), self.agent)
            for a in agents:
                if a != self.agent:
                    plansDict[a] = p.getLocalJsonPlan(a)
            plansDict[self.agent] = self.plan.getLocalJsonPlan(self.agent, currentTime=(time.time() - self.beginDate))
            p = Plan.mergeJsonPlans(plansDict, idAgent = sender)
            p["current-time"] = plansDict[self.agent]["current-time"]

            # See if the plan is still temporally valid. It could be a problem if I added a ub for a com
            # while the other robot was late : both constraints are problematic. In this case, drop my
            # current com or the com of the other robot
            try:
                _ = Plan(json.dumps(p), self.agent)
            except PlanImportError as e:
                logger.warning("The fused plan will not be valid. Try to drop a current com")

                for action in p["actions"].values():
                    #for name,_,_ in self.ongoingActions:
                    name = action["name"]
                    if self.agent not in name: continue
                    #if not (action["startTp"] in p["absolute-time"] and not action["endTp"] in p["absolute-time"]):continue
                    if "communicate " in name:
                        logger.info("I want to drop (%s,%s,%s)" % (name,action["startTp"],action["endTp"]))
                        logger.info("%s" % (action))
                        logger.info(action["startTp"] in p["absolute-time"] )
                        logger.info(action["endTp"] in p["absolute-time"] )
                        logger.info([a["name"] for a in self.plan.actions.values()])
                        logger.info([a["name"] for a in otherPlan["actions"].values()])

                        if action["agent"] == self.agent:
                            #Find the com meta name
                            robot1,robot2 = name.split(" ")[1:3]
                            nameIndex = None
                            for k,a in self.plan.actions.items():
                                if a["name"].startswith("communicate-meta %s %s" % (robot1,robot2)) or\
                                   a["name"].startswith("communicate-meta %s %s" % (robot2,robot1)):
                                    name = a["name"]
                                    nameIndex = k
                                    break
                            if nameIndex is None:
                                logger.error("Could not find the index of the com meta action ! %s" % name)
                                continue
                                #self.state = State.ERROR
                                #return
                            else:
                                logger.warning("Dropping %s (%s)" % (name, nameIndex))
                                self.dropCommunication(name)
                        else:
                            # The com belongs to the other robot: remove its
                            # meta action from their plan and re-merge.
                            robot1,robot2 = name.split(" ")[1:3]
                            nameIndex = None
                            for k,a in otherPlan["actions"].items():
                                if a["name"].startswith("communicate-meta %s %s" % (robot1,robot2)) or\
                                   a["name"].startswith("communicate-meta %s %s" % (robot2,robot1)):
                                    name = a["name"]
                                    nameIndex = k
                                    break
                            if nameIndex is None:
                                logger.error("Could not find the index of the com meta action ! %s" % name)
                                continue
                                #self.state = State.ERROR
                                #return
                            logger.info("Length before %s" % len(otherPlan["actions"]))
                            otherPlan = Plan.removeAction(otherPlan, nameIndex)  #remove the com meta for actions that I dropped
                            logger.info("Length after %s" % len(otherPlan["actions"]))
                            p = Plan(json.dumps(otherPlan), self.agent)
                            for a in agents:
                                if a != self.agent:
                                    plansDict[a] = p.getLocalJsonPlan(a)
                            plansDict[self.agent] = self.plan.getLocalJsonPlan(self.agent, currentTime=(time.time() - self.beginDate))
                            p = Plan.mergeJsonPlans(plansDict, idAgent = sender)
                            p["current-time"] = plansDict[self.agent]["current-time"]

            self.newPlanToImport = json.dumps(p)
            #logger.info("Other plans are : %s" % plansDict)
            #logger.info("New plan to import next is %s" % self.newPlanToImport)
            return
        elif otherID in self.plan.ids:
            # Their plan is an ancestor of ours: we are ahead.
            logger.info("I'm more up to date. Do nothing")
            return
        else:
            # Diverged histories: a full repair is required.
            logger.info("We are not on the same branch : repair the plan")
            self.triggerRepair = True
def from_plan(cls, planfile, config='config'):
    """Parse *planfile* and run it through a PlanProcessor with *config*."""
    parsed = Plan.from_file(planfile)
    return PlanProcessor(parsed, config).process_plan()
from plan import Plan

# Register the local 'test' script every 5 minutes and push the crontab
# update immediately on import.
cron = Plan(__name__)

script_name = 'test'
cron.script(script_name, every='5.minute')
cron.run('update')
# Copyright 2014 CourseApp.me All rights reserved
#
# Authors: Paul D'Amora
#
# run.py is independent of flask and the rest of courseapp: it only writes
# the periodic-task crontab (schedule.py) into the user's cron table.
from plan import Plan

cron = Plan()

# Periodic jobs would be registered here, e.g. run schedule.py every
# 10 minutes (currently disabled):
#cron.script('schedule.py', every='10.minute')

if __name__ == '__main__':
    # Run from the terminal; cron.run accepts several modes.
    cron.run('write') # could be 'check', 'write', 'update', 'clear'
# NOTE(review): fragment — this crontab definition file is truncated at the
# chunk boundary (the last job's command is missing).
from plan import Plan
import os

cron = Plan()
# Shell scripts live in ./crontab next to this file.
crontabDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'crontab')
# Off-peak window: 19:00 through 07:00.
hours = 'hour.19 hour.20 hour.21 hour.22 hour.23 hour.0 hour.1 hour.2 hour.3 hour.4 hour.5 hour.6 hour.7'

########################################################################
# Scheduled spider (flock prevents overlapping runs)
name = 'cron_spider'
cron.command('flock -xn /tmp/%s.lock -c %s.sh' % (name, os.path.join(crontabDir, name)), every='1.minute')

########################################################################
# pHash computation
name = 'cron_phash'
cron.command('flock -xn /tmp/%s.lock -c %s.sh' % (name, os.path.join(crontabDir, name)), every='1.day', at=hours)

########################################################################
# Match computation
name = 'cron_match'
cron.command('flock -xn /tmp/%s.lock -c %s.sh' % (name, os.path.join(crontabDir, name)), every='1.day', at=hours)

########################################################################
# Item match computation
name = 'cron_itemmatch'
# (truncated in this chunk)