def get_result_plot_js(master_iteration_id):
    """Render the result plot page (HTML/JavaScript) for a master iteration.

    :param master_iteration_id: id of the MasterIteration row to render
    :return: rendered template; aborts with 404 when not a master node,
        when the iteration is missing, or when it has no graph data
    """
    current_f_name = inspect.currentframe().f_code.co_name
    if not pipong_is_master():
        # bug fix: removed a stray '}' that appeared in the logged message
        logger.debug("{}: This node is not a master".format(current_f_name))
        abort(404)
    master_it = db.session.query(models.MasterIteration).filter_by(
        id=master_iteration_id).first()
    if master_it is None:
        logger.error("{}: No MasterIteration found with id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    if not master_it.json_graph:
        logger.error("{}: Empty json_graph for id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    return render_template('master/result_plot_js.html',
                           title='Result Plot JS',
                           graph_data_json=master_it.json_graph,
                           master_iteration_id=master_iteration_id)
def getPath(self, start, flying=False):
    """Return a path from *start* to self.goal, or False when none exists.

    Walking paths are recomputed on every call; the flying path is computed
    once and cached on self.flying_path.
    """
    if not flying:
        route, ok = pypf.get_path(
            self.w_grid, self.w_grid, start, self.goal
        )
        return route if ok else False
    if self.flying_path:
        # reuse the previously computed flying path
        return self.flying_path
    logger.debug("Generating flying path")
    route, ok = pypf.get_path(
        self.f_grid, self.f_grid, start, self.goal
    )
    if ok:
        self.flying_path = route
        return route
    logger.debug("Could not find path to goal for flying.")
    return False
def get_result_plot_json(master_iteration_id):
    """Return the result plot data of a master iteration as JSON.

    :param master_iteration_id: id of the MasterIteration row to serve
    :return: JSON graph with each link marked directed left->right;
        aborts with 404 on any missing precondition
    """
    current_f_name = inspect.currentframe().f_code.co_name
    if not pipong_is_master():
        # bug fix: removed a stray '}' that appeared in the logged message
        logger.debug("{}: This node is not a master".format(current_f_name))
        abort(404)
    master_it = db.session.query(models.MasterIteration).filter_by(
        id=master_iteration_id).first()
    if master_it is None:
        logger.error("{}: No MasterIteration found with id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    if not master_it.json_graph:
        logger.error("{}: Empty json_graph for id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    js_graph = json.loads(master_it.json_graph)
    # mark every edge as directed left->right for the JS renderer
    for e in js_graph['links']:
        e['left'] = False
        e['right'] = True
    return jsonify(js_graph)
def test_post_command(self):
    """POST /commands with a filename query string should return 200."""
    resp = self.app.post(
        "/commands",
        query_string="filename=commands.txt",
        content_type="text/plain",
    )
    self.assertEqual(resp.status_code, 200)
    logger.debug('Response from post request to commands url is {}'.format(resp.data))
    logger.debug('Response code post request to commands is {}'.format(resp.status_code))
    return resp
def remove_old_nodes(): """ Delete older pinger and pongers registered in more than 30 minutes :return: """ current_f_name = inspect.currentframe().f_code.co_name logger.info("{}: Remove_old_nodes called".format(current_f_name)) if not pipong_is_master(): return None since = datetime.now() - timedelta(minutes=30) s = db.session() pinger_t = db.session.query(models.RegisteredPingerNode).filter( or_(models.RegisteredPingerNode.last_updated_date is None, models.RegisteredPingerNode.last_updated_date < since)) logger.debug("{}: Old pingers: {}".format(current_f_name, pinger_t.count())) pinger_t.delete() ponger_t = db.session.query(models.RegisteredPongerNode).filter( or_(models.RegisteredPongerNode.last_updated_date is None, models.RegisteredPongerNode.last_updated_date < since)) logger.debug("{}: Old pongers: {}".format(current_f_name, ponger_t.count())) ponger_t.delete() s.commit()
def spawnMob(self, variant, free=False):
    """Spawn a mob of the given variant if the AI can afford it.

    :param variant: one-letter variant suffix, e.g. "Q", "W", ...
    :param free: when True the mob costs no AI gold
    :return: False when the variant is unknown, otherwise None
    """
    # name = str(self.mobtier) + variant
    name = str(1) + variant
    # bug fix: map to classes instead of instances; the old dict eagerly
    # constructed all twelve mob objects on every call just to pick one,
    # wasting work and running every constructor's side effects.
    options = {
        "1Q": Mob, "1W": Mob1W, "1E": Mob1E, "1R": Mob1R,
        "1A": Mob1A, "1S": Mob1S, "1D": Mob1D, "1F": Mob1F,
        "1Z": Mob1Z, "1X": Mob1X, "1C": Mob1C, "1V": Mob1V,
    }
    try:
        mob = options[name](self, name)
    except KeyError as err:
        logger.debug("Mob not found: {0}".format(err))
        return False
    if self.mobtier == 2:
        # tier-2 mobs are 50x tougher and worth 50x the bounty
        mob.hp_max *= 50
        mob.hp = mob.hp_max
        mob.bounty *= 50
    # spawning costs the AI twice the mob's bounty unless free
    if mob.bounty * 2 <= self.ai_gold or free:
        if not free:
            self.ai_gold -= mob.bounty * 2
        self.mobs.append(mob)
def finish_old_iterations():
    """Mark master iterations created more than 30 minutes ago as FINISHED.

    Master-only maintenance task; silently returns on non-master nodes.
    :return: None
    """
    current_f_name = inspect.currentframe().f_code.co_name
    # bug fix: the log line was copy-pasted from remove_old_nodes and
    # reported the wrong function
    logger.info("{}: Finish_old_iterations called".format(current_f_name))
    if not pipong_is_master():
        return None
    since = datetime.now() - timedelta(minutes=30)
    s = db.session()
    # bug fix: `column is None` is Python identity on the Column object
    # (always False); SQLAlchemy needs `.is_(None)` to emit "IS NULL".
    master_t = db.session.query(models.MasterIteration).filter(
        or_(models.MasterIteration.created_date.is_(None),
            models.MasterIteration.created_date < since))
    logger.debug("{}: Old iterations: {}".format(current_f_name,
                                                 master_t.count()))
    for e in master_t:
        e.status = "FINISHED"
    s.commit()
def predict(self):
    """Consume items from ITEM_QUEUE, run the model, and append JSON lines.

    Exits after RETRY_TIME consecutive empty-queue polls.
    """
    global ITEM_QUEUE, RETRY_TIME
    retry = 0  # consecutive empty-queue polls so far
    logger.info(f"Start predict thread")
    while retry <= RETRY_TIME:
        try:
            item = ITEM_QUEUE.get(timeout=3)
        except queue.Empty:
            logger.debug(f"Queue is empty, wait 1 seconds.")
            retry += 1
            time.sleep(1)
            continue
        tokens = model.prepross(item["clean_text"])
        item["predict"] = model.predict(tokens) if len(tokens) else (1, 0, 0, 0)
        # persist the prediction as one JSON line
        with FileWriter(self.filename, mode=self.mode) as writer:
            writer.write(json.dumps(item, ensure_ascii=False) + "\n")
        logger.debug(f"Predict item id is {item['id']}")
        # a successful fetch resets the retry counter
        retry = 0
    # exhausted RETRY_TIME
    logger.info(f"Data item is exhausted")
def test_commands_get(self):
    """GET /commands should return 200."""
    response = self.app.get("/commands")
    self.assertEqual(response.status_code, 200)
    logger.debug('Response from get request to commands url is {}'.format(response.data))
    logger.debug('Response code get request to commands is {}'.format(response.status_code))
    # removed a duplicate assertion that re-checked the same status code
    return response
def highlightItems(self, items):
    """Select a lone item as the mouse selection, or highlight a group."""
    if not items:
        return
    if len(items) == 1:
        logger.debug("Adding {0} as selected tower".format(items[0]))
        self.selected_mouse = items[0]
    else:
        self.highlighted = items
def test_incorrect_params(self):
    """POST /commands with a malformed payload should be rejected with 400."""
    file_data = {'COMMANDS_LIST': ['ls', 'ls -al'],
                 'VALID_COMMANDS': ['ls', 'ls -al']}
    response = self.app.post('/commands',
                             query_string='file_data=' + json.dumps(file_data),
                             content_type='application/json')
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.data, 'Parameters did not match expected format')
    # bug fix: the format string had no '{}' placeholder, so the response
    # data was silently dropped from the log (also fixes "fiile_data" typo)
    logger.debug('response data after posting file_data as json payload {}'.format(response.data))
    return response
def package(self, branch):
    """Check out *branch* and build the project with Maven (tests skipped).

    :param branch: git branch name to build
    :raises RuntimeError: when the shell pipeline exits non-zero
    """
    logger.debug("package project:")
    shell = ("cd {0} && git checkout {1} && mvn package -DskipTests=true"
             ).format(self.location + self.dest, branch)
    rc = LocalShell.call(shell, shell=True)
    if rc != 0:
        # bug fix: include the exit code and command so the error is
        # actionable instead of raising a bare RuntimeError
        raise RuntimeError("package failed (rc={0}): {1}".format(rc, shell))
def post(self, payload):
    """Create a new user. Only authenticating user can post

    User ID is defined by the verified subject in the access token
    """
    logger.info(f"POST request to create user "
                f"{payload['sub']} from {request.remote_addr}")
    message = ""
    code = 201
    try:
        new_user = User(**api.payload)
        new_user.id = payload['sub']
        api.payload['id'] = payload['sub']
        new_user.insert()
    except ValueError as err:
        code, message = 422, str(err)
    except IntegrityError:
        code = 422
        message = ("Cannot add to existing user. "
                   "Use Patch request instead")
    except Exception as err:
        logger.debug(err)
        code = 400
        message = "The request data format is not valid"
    if code != 201:
        abort(code, message)
    return api.payload, 201
def get_result_plot(master_iteration_id):
    """Render the result graph of a master iteration as a PNG image.

    :param master_iteration_id: id of the MasterIteration row to plot
    :return: Flask Response with mimetype image/png; aborts with 404 on
        any missing precondition
    """
    current_f_name = inspect.currentframe().f_code.co_name
    if not pipong_is_master():
        # bug fix: removed a stray '}' that appeared in the logged message
        logger.debug("{}: This node is not a master".format(current_f_name))
        abort(404)
    master_it = db.session.query(models.MasterIteration).filter_by(
        id=master_iteration_id).first()
    if master_it is None:
        logger.error("{}: No MasterIteration found with id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    if not master_it.json_graph:
        logger.error("{}: Empty json_graph for id: {}".format(
            current_f_name, master_iteration_id))
        abort(404)
    js_graph = json.loads(master_it.json_graph)
    G = json_graph.node_link_graph(js_graph)
    pos = nx.drawing.nx_agraph.graphviz_layout(G, prog='dot')
    # label each node with its name and mean percentage
    node_labels = {}
    for k, v in G.nodes(data=True):
        node_labels[k] = '{}\n{:.2f}%'.format(k, v['mean'])
    nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=5)
    # bug fix: Graph.node was removed in networkx 2.4; use G.nodes
    # (consistent with the G.nodes(data=True) call above)
    node_colors = [G.nodes[n]['mean'] for n in G.nodes()]
    nx.draw_networkx_nodes(
        G, pos, node_color=node_colors, node_size=600, node_shape='o',
        cmap=plt.cm.OrRd, vmin=0., vmax=100.)
    nx.draw_networkx_edges(
        G, pos, arrowstyle='-|>', arrowsize=20, edge_color='black', width=1)
    fig = plt.gcf()
    fig.set_size_inches(30, 20)
    plt.savefig('/tmp/last_generated_result.png', dpi=250)
    output = io.BytesIO()
    FigureCanvas(fig).print_png(output)
    # bug fix: free the figure so repeated requests do not accumulate
    # open matplotlib figures
    plt.close(fig)
    return Response(output.getvalue(), mimetype='image/png')
def get(self, payload, user_id):
    """Obtain user information.

    Only authenticated user can access their own resource
    """
    logger.info(f"GET request to user "
                f"{user_id} from {request.remote_addr}")
    found = User.query.get(user_id)
    if found is not None:
        return found
    logger.debug(f"GET error {user_id} does not exist ")
    abort(404, f"User {user_id} does not exist.")
def clone(self):
    """Clone the repository into self.location; fall back to a pull when
    the clone reports it already exists (git exit code 128)."""
    logger.debug("clone repo:")
    rc = LocalShell.call(
        "mkdir -p {0} && cd {0} && git clone -q {1}".format(
            self.location, self.url),
        shell=True)
    if rc == 128:
        # repository already present: update it instead
        rc = LocalShell.call(
            "cd {0} && git pull".format(self.location + self.dest),
            shell=True)
    # branch name required
    if rc != 0:
        raise RuntimeError
def pathFinding(self, dt, limit=1):
    """Recompute targets for up to *limit* mobs waiting in self.pf_queue.

    :param dt: frame delta time (unused here; kept for the update signature)
    :param limit: maximum mobs processed this call (doubled for a backlog
        of 30 or more)
    """
    if len(self.pf_queue) > 0:
        if len(self.pf_queue) >= 30:
            # large backlog: drain twice as fast
            limit *= 2
        logger.debug("Calculating paths for pf_queue.")
        logger.debug("Length of queue: {0}.".format(len(self.pf_queue)))
        # bug fix: the old loop called pf_queue.remove(m) while iterating
        # pf_queue, which skips every other element; pop from the front
        # instead so exactly the first `limit` mobs are processed.
        count = 0
        while self.pf_queue and count < limit:
            m = self.pf_queue.pop(0)
            m.updateTarget()
            count += 1
def start_iperf_server():
    """Reserve an iperf server port for the requesting pinger.

    Executed on a ponger in response to a pinger's request.  Each pinger
    (keyed by its source IP) gets a dedicated port from the configured
    reserved range; repeat requests reuse the previously allocated port.

    :return: JSON with result status and, on success, the reserved port
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: start_iperf_server".format(current_f_name))
    if not pipong_is_ponger():
        return jsonify({
            'result': 'failure',
            'msg': 'this server is not a ponger'
        })
    ip_addr = request.remote_addr
    # has this pinger already been allocated a port?
    pingerp_t = db.session.query(
        models.AllocatedPingerPort).filter_by(address=ip_addr).first()
    if not pingerp_t:
        all_t = db.session.query(models.AllocatedPingerPort).all()
        all_ports = [row.port for row in all_t]
        # NOTE(review): range() excludes RESERVED_PORT_RANGE_MAX — confirm
        # whether the maximum port is meant to be allocatable
        possible_ports = list(
            range(app.config['RESERVED_PORT_RANGE_MIN'],
                  app.config['RESERVED_PORT_RANGE_MAX']))
        # pick the lowest free port in the reserved range
        available_ports = sorted(list(set(possible_ports) - set(all_ports)))
        port = available_ports[0]
        logger.debug("{}: For host:{} new selected port generated:{}".format(
            current_f_name, ip_addr, port))
        s = db.session()
        pingp_t = models.AllocatedPingerPort(address=ip_addr, port=port)
        s.add(pingp_t)
        s.commit()
    else:
        port = pingerp_t.port
        logger.debug("{}: For host:{} selected port:{}".format(
            current_f_name, ip_addr, port))
    # launch the iperf server asynchronously and block for its result
    result = tasks.ponger_tasks.create_iperf_server.delay(port)
    creation_status = result.get()
    if not creation_status:
        return jsonify({
            'result': 'failure',
            'msg': 'cannot start iperf server'
        })
    return jsonify({'result': 'success', 'port': port})
def getDragSelection(self, rect):
    """Return the towers whose position lies inside *rect*, or False when
    the rect is empty or contains no towers."""
    if not rect:
        return False
    selection = []
    for tower in self.towers:
        if check_point_rectangle(tower.x, tower.y, rect):
            logger.debug("Found {0} in drag rectangle.".format(tower))
            selection.append(tower)
    if selection:
        return selection
    logger.debug("No towers in rect {0}".format(rect))
    return False
def checkout_branch(self, branch, version=""):
    """Check out *branch* (creating a tracking branch when it only exists
    on the remote), pull it, and hard-reset to *version*."""
    logger.debug("checkout branch:")
    repo_dir = self.location + self.dest
    if branch in self.local_branch():
        cmd = ("cd {0} && git checkout -q {1} && git pull "
               "-q origin {1} && git reset --hard {2}").format(
                   repo_dir, branch, version)
    else:
        # branch only exists on the remote: create a tracking branch first
        cmd = ("cd {0} && git checkout -q -b {1} -t "
               "origin/{1} && git pull -q origin {1} && "
               "git reset --hard {2}").format(repo_dir, branch, version)
    LocalShell.check_call(cmd, shell=True)
async def _stocks(self, ctx, ticker, param=None, start=None, end=None):
    """Bot command: fetch stock info, price history, or earnings calendar
    for *ticker* and send it in <=2000-char chunks."""
    logger.debug(f'{ctx.message.content} <{ticker}>')
    if param == 'history':
        data = ft.finance_history(ticker, start, end)
    elif param == 'calendar':
        data = ft.finance_calendar(ticker)
    else:
        await ctx.send(f'Retrieving stock info {ticker}')
        data = ft.finance_helper(ticker)
        await ctx.send(f'Tada!')
    logger.debug(data)
    # Discord limits messages to 2000 chars, so send in pieces
    for chunk in split_2000(data):
        await ctx.send(chunk)
def _run(self, ds):
    """Dump the configured mongo database to a gzipped archive and upload it.

    :param ds: date stamp used to build the backup file name
    """
    tmp_file_name, tmp_file_name_with_path = BackupJob.construct_filename(
        self.mongo_config.prefix,
        # need an array
        [self.mongo_config.database],
        self.mongo_config.suffix,
        self.base_config.tmp_folder,
        self.construct_dt(ds))
    try:
        # dump mongo collection to file
        # mongodump --username xxx --password xxx --host xxx --db xxx --out xxx
        # credentials are only passed when a password is configured
        mongo_dump_command = "mongodump --host {} --username {} --password {} --db {} ".format(
            self.mongo_config.host,
            self.mongo_config.username,
            self.mongo_config.password,
            self.mongo_config.database,
        ) if self.mongo_config.password else "mongodump --host {} --db {}".format(
            self.mongo_config.host,
            self.mongo_config.database)
        command = "{} --gzip --archive={}".format(mongo_dump_command,
                                                  tmp_file_name_with_path)
        logger.debug("running {}".format(command))
        c = delegator.run(command)
        logger.warning("dumped back up file {}".format(tmp_file_name))
        if c.return_code != 0:
            raise RuntimeError(c.std_err)
        # upload if not dry_run
        if not self.base_config.dry_run:
            self.uploader.upload(tmp_file_name, tmp_file_name_with_path)
    except (RuntimeError, AssertionError) as e:
        # log
        logger.error(e)
    finally:
        # delete
        if self.base_config.delete_tmp_file:
            BackupJob.safe_delete(tmp_file_name_with_path)
        else:
            logger.warning("tmp file deletion is off!")
def run(self):
    """Dump the configured MySQL database, gzip and DES-encrypt it, and
    upload the resulting archive.
    """
    tmp_file_name, tmp_file_name_with_path = BackupJob.construct_filename(
        self.sql_config.prefix,
        # need an array
        [self.sql_config.database],
        self.sql_config.suffix,
        self.base_config.tmp_folder)
    try:
        # dump sql to file
        # mysqldump -h xxx -d xxx -u root | gzip --best | openssl des -salt -k xxxxxx
        # credentials are only passed when a password is configured
        sql_dump_command = "mysqldump -h{} -u{} -p{} {}".format(
            self.sql_config.host,
            self.sql_config.username,
            self.sql_config.password,
            self.sql_config.database
        ) if self.sql_config.password else "mysqldump -h {} -u {} {}".format(
            self.sql_config.host,
            self.sql_config.username,
            self.sql_config.database)
        command = "{} | gzip --best | openssl des -salt -k {} > {}".format(
            sql_dump_command,
            self.base_config.passphrase,
            tmp_file_name_with_path)
        logger.debug("running {}".format(command))
        c = delegator.run(command)
        logger.warning("dumped back up file {}".format(tmp_file_name))
        if c.return_code != 0:
            raise RuntimeError(c.std_err)
        # upload if not dry_run
        if not self.base_config.dry_run:
            self.uploader.upload(tmp_file_name, tmp_file_name_with_path,
                                 self.sql_config.expired)
    except (RuntimeError, AssertionError) as e:
        # log
        logger.error(e)
    finally:
        # delete
        if self.base_config.delete_tmp_file:
            BackupJob.safe_delete(tmp_file_name_with_path)
        else:
            logger.warning("tmp file deletion is off!")
def post(self, payload, user_id):
    '''Post a progress'''
    logger.info(f"POST request to track progress for {user_id} "
                f"from {request.remote_addr}")
    message = ""
    try:
        owner = User.query.get(user_id)
        if owner is None:
            code = 404
            message = f"Cannot track progress for {user_id}. " \
                      f"User does not exist."
        else:
            payload_data = api.payload
            track_date = payload_data["track_date"]
            weight = payload_data["weight"]
            mood = payload_data["mood"].lower()
            diet = payload_data["diet"].lower()
            payload_data["user_id"] = user_id
            entry = Progress(user_id=user_id, track_date=track_date,
                             weight=weight, mood=mood, diet=diet)
            entry.insert()
            code = 201
    except ValueError as e:
        code = 422
        message = str(e)
    except IntegrityError:
        code = 422
        message = "Cannot add to existing progress. " \
                  "Use Patch request instead"
    except Exception as e:
        logger.debug(e)
        code = 400
        message = "The request data format is not valid"
    if code != 201:
        abort(code, message)
    return api.payload, 201
def patch(self, payload, user_id):
    """Patch a progress entry for *user_id* at the payload's track_date.

    :return: empty body with 204 on success; aborts otherwise
    """
    logger.info(f"PATCH request to progress for {user_id} "
                f"from {request.remote_addr}")
    message = ""
    try:
        user = User.query.get(user_id)
        if user is None:
            code = 404
            message = f"Cannot modify progress for {user_id}." \
                      f" User does not exist."
        else:
            # bug fix: read track_date only after the user-exists check so
            # a missing field on an unknown user still yields 404, not a
            # generic 400 from the KeyError handler below
            track_date = api.payload["track_date"]
            progress = Progress.query \
                .filter(Progress.user_id == user_id) \
                .filter(Progress.track_date == track_date).first()
            if progress is None:
                code = 404
                message = f"Cannot modify progress for {track_date}. " \
                          f"This progress does not exist."
            else:
                progress.update(api.payload)
                code = 204
    except ValueError as e:
        code = 422
        message = str(e)
    except IntegrityError:
        code = 422
        message = "Cannot add to existing progress." \
                  " Use Patch request instead"
    except Exception as e:
        logger.debug(e)
        code = 400
        message = "The request data format is not valid"
    if code != 204:
        abort(code, message)
    return '', 204
def run(self):
    """Back up the redis RDB file: copy the snapshot, gzip + DES-encrypt
    it, then upload the archive.
    """
    tmp_file_name, tmp_file_name_with_path = BackupJob.construct_filename(
        self.redis_config.prefix,
        # need an array
        ["redis"],
        self.redis_config.suffix,
        self.base_config.tmp_folder)
    rdb_tmp_file_name, rdb_tmp_file_name_with_path = BackupJob.construct_filename(
        self.redis_config.prefix,
        # need an array
        ["redis", "rdbdump"],
        "rdb",
        self.base_config.tmp_folder)
    try:
        # snapshot the live RDB file first
        command = "cp {} {}".format(self.redis_config.rdb_path,
                                    rdb_tmp_file_name_with_path)
        logger.debug("running {}".format(command))
        c = delegator.run(command)
        if c.return_code != 0:
            raise RuntimeError(c.std_err)
        command = "gzip -9c {} | openssl des -salt -k {} > {}".format(
            rdb_tmp_file_name_with_path,
            self.base_config.passphrase,
            tmp_file_name_with_path)
        logger.debug("running {}".format(command))
        c = delegator.run(command)
        # bug fix: the compress/encrypt step's exit code was never checked,
        # so a corrupt or empty archive could be uploaded silently
        if c.return_code != 0:
            raise RuntimeError(c.std_err)
        # upload if not dry_run
        if not self.base_config.dry_run:
            self.uploader.upload(tmp_file_name, tmp_file_name_with_path,
                                 self.redis_config.expired)
    except (RuntimeError, AssertionError) as e:
        # log
        logger.error(e)
    finally:
        # delete
        if self.base_config.delete_tmp_file:
            BackupJob.safe_delete(rdb_tmp_file_name_with_path)
            BackupJob.safe_delete(tmp_file_name_with_path)
        else:
            logger.warning("tmp file deletion is off!")
def find(cls, query): """Return all streets starting with query from database.""" errors = [] streets = cls.query.filter(cls.name.startswith(query)).all() rv = {} for street in streets: logger.debug(street) range_data = { "id": street.id, "name": street.number_range } if street.name not in rv: rv[street.name] = { "name": street.name, "ranges": [range_data] } else: rv[street.name]["ranges"].append(range_data) return [s for s in rv.values()]
def patch(self, payload, user_id):
    """ Update user.
    Authenticated user can only access their own resource """
    logger.info(f"PATCH request to modify user {user_id} "
                f"from {request.remote_addr}")
    message = ""
    try:
        user = User.query.get(user_id)
        if user is None:
            logger.debug(f"PATCH error {user_id} does not exist ")
            code = 404
            message = f"User {user_id} does not exist."
        else:
            user.update(api.payload)
            code = 204
    except ValueError as e:
        message = str(e)
        code = 422
    except IntegrityError:
        code = 422
        # bug fix: the old text ("Cannot patch existing user. Use Patch
        # request instead") was copy-pasted from the POST handler and was
        # nonsensical inside a PATCH handler
        message = "Cannot apply patch: conflicting user data"
    except Exception as e:
        logger.debug(e)
        code = 400
        message = "The request data format is not valid"
    if code != 204:
        abort(code, message)
    logger.debug(f"Modifying {user_id} is successful requested "
                 f"from {request.remote_addr}")
    return '', 204
def delete(self, payload, user_id):
    """Delete existing user. Require delete:user permission

    This will also delete associated progress for this user
    """
    logger.info(f"DELETE request to user "
                f"{user_id} from {request.remote_addr}")
    message = ""
    try:
        user = User.query.get(user_id)
        if user is None:
            logger.debug(f"DELETE error {user_id} does not exist ")
            code = 404
            message = f"User {user_id} does not exist."
        else:
            associated_progress = Progress.query\
                .filter(Progress.user_id == user_id).all()
            # bug fix: Query.all() returns a list and never None, so the
            # old `is not None` check always passed; test for non-empty
            if associated_progress:
                logger.debug(f"DELETE: deleting all progress"
                             f" related to {user_id}")
                for progress in associated_progress:
                    progress.delete()
            user.delete()
            code = 204
    except Exception as e:
        logger.debug(f"DELETE error {user_id} Exception: {e} ")
        code = 500
        message = f"Server encountered issue deleting user {user_id}"
    if code != 204:
        abort(code, message)
    return '', 204
def doAction(self, entry):
    """Dispatch a menu entry's action: map cycling, sound toggle, or an
    animated transition into another screen."""
    self.w.playSFX("click")
    self.onUp(entry)
    e = entry
    if e.action == "selectmap":
        # advance to the next map, wrapping around at the end of the list
        index = self.w.maplist.index(self.w.selected_mapfile)
        if index == len(self.w.maplist) - 1:
            self.w.selected_mapfile = self.w.maplist[0]
        else:
            self.w.selected_mapfile = self.w.maplist[index + 1]
        str_len = len(self.w.selected_mapfile)
        # truncate long names from the left so the tail stays visible
        if str_len > self.max_char:
            e.label.text = self.w.selected_mapfile[str_len - self.max_char:]
        else:
            e.label.text = self.w.selected_mapfile
    elif e.action == "togglesound":
        self.w.sound_enabled = not self.w.sound_enabled
        e.label.text = "Sound: {0}".format(self.w.sound_enabled)
        logger.debug(
            "Toggled sound_enabled to {0}.".format(self.w.sound_enabled)
        )
    elif e.action in ("newgame", "resume", "settings", "topmenu", "quit"):
        # these actions all animate out, then continue from on_anim_end
        self.animateOut()
        self.on_anim_end = e.action
def register_pinger(): """ Register a pinger in this master node :return: """ current_f_name = inspect.currentframe().f_code.co_name if not pipong_is_master(): return jsonify({ 'result': 'failure', 'msg': 'this server is not a master' }) data = request.get_json() ip_addr = request.remote_addr api_port = data['api_port'] api_protocol = data['api_protocol'] registrered_t = db.session.query(models.RegisteredPingerNode).filter_by( address=ip_addr, api_port=api_port).first() s = db.session() if not registrered_t: pingp_t = models.RegisteredPingerNode( address=ip_addr, api_port=api_port, api_protocol=api_protocol) s.add(pingp_t) logger.debug( "{}: Registering ping: host:{} api_port:{} api_protocol:{}".format( current_f_name, ip_addr, api_port, api_protocol)) else: registrered_t.last_updated_date = datetime.now() s.commit() return jsonify({'result': 'success'})
def run(self):
    """Dump each monitored table, diff it against the previous dump, and
    send a webhook message when the table's contents changed.
    """
    base_folder = self.base_config.tmp_folder
    database = self.sql_config.database
    for table in self.sql_config.tables:
        previous_dump_name = MonitorJob.construct_filename_with_path(
            base_folder, database, table, "previous")
        current_dump_name = MonitorJob.construct_filename_with_path(
            base_folder, database, table, "current")
        try:
            # dump sql to file
            # mysqldump --skip-comments --skip-extended-insert -u root -p db table > file.sql
            sql_dump_command = "mysqldump --skip-comments --skip-extended-insert -h{} -u{} -p{} {} {} > {}".format(
                self.sql_config.host,
                self.sql_config.username,
                self.sql_config.password,
                database,
                table,
                current_dump_name
            ) if self.sql_config.password else "mysqldump --skip-comments --skip-extended-insert -h{} -u{} {} {} > {}".format(
                self.sql_config.host,
                self.sql_config.username,
                database,
                table,
                current_dump_name
            )
            command = sql_dump_command
            logger.debug("running {}".format(command))
            c = delegator.run(command)
            if c.return_code != 0:
                raise RuntimeError(c.std_err)
            # Compare and send
            command = "diff {} {}".format(previous_dump_name,
                                          current_dump_name)
            logger.debug("running {}".format(command))
            c = delegator.run(command)
            # bug fix: diff exits 0 for "no differences", 1 for
            # "differences found", >1 for trouble; the old code logged
            # exit 0 (the common no-change case) as "Diff failed"
            if c.return_code == 1:
                msg = {
                    "title": "Diff on {}-{}".format(database, table),
                    "text": self.reconstruct_msg(c.out)
                }
                logger.debug(msg)
                self.webhook.send_msg(msg)
            elif c.return_code > 1:
                logger.error("Diff failed {}".format(c.err))
            # move file
            os.rename(current_dump_name, previous_dump_name)
            MonitorJob.safe_delete(current_dump_name)
        except (RuntimeError, AssertionError) as e:
            # log
            logger.error(e)
def get_item(self): """Get Data Item In Loop There is a loop can get data item, that can be put in the a global queue `ITEM_QUEUE`. It is a condition that data id stop be updated, which the method exit. """ global ITEM_QUEUE logger.debug("Start Retive Data Item") while True: try: session = self.retrivor.Session() query = session.query(DataModel) \ .filter(DataModel.id>=self.retrivor.start_id) \ .order_by(DataModel.id) \ .limit(30) \ .all() for item in query: result = {} result["id"] = item.id result["content"] = item.content result["media_id"] = item.media_id result["source"] = item.source result["title"] = item.title result["tenden"] = item.tenden result["clean_text"] = BeautifulSoup( item.title + item.content, "html.parser").get_text() # put restult into the queue ITEM_QUEUE.put(result) logger.debug(f"Get One Item id:<{result['id']}>") time.sleep(0.5) # if item id doesn't update, stop loop if item.id == self.retrivor.start_id: logger.debug(f"Data extracted done") return else: logger.debug( f"Update retrivor object start id:<{item.id}>") self.retrivor.start_id = item.id finally: session.close()
def call(*args, **kargs):
    """Run a command via Popen, log its output, and return the exit code.

    Accepts the same arguments as subprocess.Popen.
    :return: the process's return code
    """
    cmd = kargs.get("args") or args[0]
    logger.debug("local shell: %s" % cmd)
    # bug fix: removed a leftover Python 2 `print cmd` statement, which is
    # a SyntaxError under Python 3 (the command is already logged above)
    process = Popen(*args, stdout=PIPE, stderr=PIPE, **kargs)
    stdout, stderr = process.communicate()
    stdout = stdout.decode("utf-8")
    stderr = stderr.decode("utf-8")
    rc = process.poll()
    logger.debug("rc: %d" % rc)
    logger.debug("stdout: %s" % stdout)
    # logger.warn is deprecated in favor of logger.warning
    logger.warning("stderr: %s" % stderr)
    return rc
def check_output(*args, **kargs):
    """Run a command via Popen and return its decoded stdout.

    Accepts the same arguments as subprocess.Popen.
    :raises CalledProcessError: when the command exits non-zero
    :return: the command's stdout as text
    """
    cmd = kargs.get("args") or args[0]
    logger.debug("local shell: %s" % cmd)
    process = Popen(*args, stdout=PIPE, stderr=PIPE, **kargs)
    stdout, stderr = process.communicate()
    stdout = stdout.decode("utf-8")
    stderr = stderr.decode("utf-8")
    rc = process.poll()
    logger.debug("rc: %d" % rc)
    logger.debug("stdout: %s" % stdout)
    # logger.warn is deprecated in favor of logger.warning
    logger.warning("stderr: %s" % stderr)
    if rc:
        raise CalledProcessError(rc, cmd, stdout)
    return stdout
def perform_pipong_iteration_1(pinger_iteration_id):
    """
    First iteration of the discovery and monitor
    Create all tracert configurations on the DB
    perform the tasks asynchronously
    When all task are finished a callback is performed to the second
    iteration step
    :param pinger_iteration_id: the iteration id from the db
    :return:
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Perform_pipong_iteration_1".format(current_f_name))
    iter_t = db.session.query(
        models.PingerIteration).filter_by(id=pinger_iteration_id).first()
    if iter_t is None:
        logger.error("{}: Iteration not found with ID: {}".format(
            current_f_name, pinger_iteration_id))
        return
    # only iterations that were never started may proceed
    if iter_t.status != "CREATED":
        logger.error(
            "{}: Iteration ID:{} is not with in CREATED status: {}".format(
                current_f_name, pinger_iteration_id, iter_t.status))
        return
    s = db.session()
    iter_t.status = "RUNNING"
    s.flush()
    s.commit()
    # each ponger receives its own contiguous block of source ports,
    # allocated sequentially starting here
    src_port_start = 40000
    for ponger in iter_t.ponger:
        api_port = ""
        if ponger.api_port != "":
            api_port = ":" + str(ponger.api_port)
        # ask the ponger to reserve a port and start an iperf server
        post_url = "{}{}{}/api/v1.0/iperf/server".format(
            ponger.api_protocol, ponger.address, api_port)
        logger.debug("{}: post_url: {}".format(current_f_name, post_url))
        try:
            req_res = requests.post(post_url,
                                    auth=requestHTTPAuth(
                                        app.config['HTTP_AUTH_USER'],
                                        app.config['HTTP_AUTH_PASS']),
                                    timeout=10)
        except Exception as e:
            # unreachable ponger: skip it and continue with the rest
            logger.error("{}: Error requesting servers: {}".format(
                current_f_name, str(e)))
            continue
        if req_res.status_code != 200:
            logger.error(
                "{}: Error creating servers: {} returned status: {}".format(
                    current_f_name, post_url, req_res.status_code))
            continue
        json_data = req_res.json()
        # the ponger must answer with {'result': 'success', 'port': N}
        if 'port' not in json_data or 'result' not in json_data or \
                json_data['result'] != 'success':
            logger.error("{}: Json data invalid: {}".format(
                current_f_name, json_data))
            continue
        logger.debug("{}: Host:{}{}{} Json data: {}".format(
            current_f_name, ponger.api_protocol, ponger.address, api_port,
            json_data))
        dst_port = json_data['port']
        # register the tracerts
        src_port_end = src_port_start + iter_t.tracert_qty
        ponger_port_t = models.PongerPort(ponger_id=ponger.id,
                                          dst_port=dst_port,
                                          src_port_max=src_port_end,
                                          src_port_min=src_port_start)
        s.add(ponger_port_t)
        s.flush()
        # next ponger starts just past this block
        src_port_start = src_port_end + 1
        logger.debug(
            "{}: Creating tracert pinger_iteration_id:{} ponger_port_id:{} ".
            format(current_f_name, pinger_iteration_id, ponger_port_t.id))
        tracert_t = models.Tracert(pinger_iteration_id=pinger_iteration_id,
                                   status='PENDING',
                                   ponger_port_id=ponger_port_t.id)
        s.add(tracert_t)
        s.flush()
    # fan out one async traceroute task per pending tracert row
    task_list = []
    tracert_qt = db.session.query(models.Tracert).filter_by(
        pinger_iteration_id=pinger_iteration_id, status='PENDING')
    for row in tracert_qt:
        logger.debug("{}: Task creating tracert tasks tracert_id:{}".format(
            current_f_name, row.id))
        task_list.append(do_dublin_tracert.s(row.id))
    iter_t.status = "RUNNING_TRACEROUTE"
    s.flush()
    s.commit()
    # run async tasks with callback
    chord(task_list)(perform_pipong_iteration_2.s(pinger_iteration_id))
def update(self, new=False):
    """Rebuild the build/walk/flight grids from the map and tower list.

    :param new: False = plain rebuild; "dry" = trial rebuild rolled back
        when it would block the path; "update" = rebuild and recompute
        the active path.
    :return: for new == "dry", True when a valid path still exists else
        False; otherwise None.
    """
    self.debug = self.g.debug
    logger.debug("generating new grid")
    t_grid = []
    w_grid = []
    f_grid = []
    # start both the build (t) and walk (w) grids from the full grid
    for p in self.fullgrid:
        t_grid.append(p)
        w_grid.append(p)
    # counters: points removed for towers/no-build (tc) and no-walk (wc)
    tc, wc = 0, 0
    for w in self.g.tiles_no_walk:
        if w in w_grid:
            w_grid.remove(w)
            wc += 1
    for b in self.g.tiles_no_build:
        if b in t_grid:
            t_grid.remove(b)
            tc += 1
    if len(self.g.flightgrid) > 0:
        logger.debug("Flightgrid found, importing.")
        for f in self.g.flightgrid:
            f_grid.append(f)
    else:
        # no dedicated flight grid: flyers may cross every tile
        for p in self.fullgrid:
            f_grid.append(p)
    self.f_grid = f_grid
    for t in self.g.towers:
        for g in w_grid:
            # Checks for towers in grid, removes them
            if t.gx == g[0] and t.gy == g[1]:
                if (t.gx, t.gy) in t_grid:
                    t_grid.remove(g)
                if (t.gx, t.gy) in w_grid:
                    w_grid.remove(g)
                tc += 1
    logger.debug("removed {0} grid points for towers".format(tc))
    logger.debug("removed {0} grid points for no walk".format(wc))
    if new == "dry":
        # trial run: apply the new grids, keep them only if a path exists
        old_w, old_t = self.w_grid, self.t_grid
        self.w_grid = w_grid
        self.t_grid = t_grid
        newpath = self.getPath(self.start)
        if newpath:
            self.path = newpath
            return True
        else:
            # path blocked: restore the previous grids
            self.w_grid = old_w
            self.t_grid = old_t
            return False
    else:
        self.w_grid = w_grid
        self.t_grid = t_grid
    if new == "update":
        # x1, y1 = str(self.start[0]), str(self.start[1])
        # x2, y2 = str(self.goal[0]), str(self.goal[1])
        # subprocess.call(["./genpath", x1, y1, x2, y2])
        # points = self.importGrid()
        # self.path = points
        newpath = self.getPath(self.start)
        if newpath:
            self.path = newpath
        else:
            # no path found: fall back to heading straight for the goal
            self.path = [self.goal]
def save(self):
    """Persist self._data via userdb, skipping when nothing has changed."""
    if self._dirty:
        logger.debug("saving '%s': %s" % (self.id, self._data))
        userdb.save(self._data)
        self._dirty = False
def placeTower(self, t, x, y, new=False):
    """Positions tower and updates game state accordingly.

    :param t: tower instance to place
    :param x: window x coordinate of the drop point
    :param y: window y coordinate of the drop point
    :param new: True when the tower is newly bought (appends to self.towers)
    """
    grid = self.grid
    placed = False
    if t.price <= self.gold or self.debug:
        try:
            gx, gy = self.window.getGridPos(x, y)
            if (gx, gy) in grid.t_grid:
                placed = True
        # bug fix: `except LookupError or ValueError` evaluates the `or`
        # first and only ever caught LookupError; a tuple catches both
        except (LookupError, ValueError) as err:
            logger.debug("Available square not found: {0}".format(err))
    if placed:
        new_g = gx, gy
        new_rg = self.window.getWindowPos(gx, gy)
        if not self.debug:
            self.gold -= t.price
        w_grid = grid.w_grid
        t.selected = False
        t.updatePos(new_rg[0], new_rg[1], new_g[0], new_g[1])
        t.id = self.tower_count
        self.tower_count += 1
        if new:
            self.towers.append(t)
            logger.debug("Towers: {0}".format(len(self.towers)))
        # does the tower touch the current path, directly or diagonally?
        update = False
        if new_g in grid.path:
            update = True
        else:
            for p in grid.path:
                if new_g in get_diagonal(
                    w_grid, p[0], p[1]
                ):
                    update = True
                    break
                elif new_g in get_neighbors(
                    w_grid, p[0], p[1]
                ):
                    update = True
                    break
        if update:
            # queue path recalculation for mobs whose route is now blocked
            for m in self.mobs:
                if m not in self.pf_queue:
                    if check_path(m, w_grid, new_g):
                        self.pf_queue.append(m)
        if not grid.update(new="dry"):
            # placement would block the only path: undo the purchase
            logger.warning("TOWER BLOCKING PATH")
            t.sell()
        logger.debug("New path for grid: {0}".format(update))
        logger.debug(
            "Tower placed at [{0},{1}]".format(new_g[0], new_g[1])
        )
    elif t in self.towers:
        self.towers.remove(t)
        grid.update(new=False)
    else:
        self.active_tower = None
        self.mouse_drag_tower = None
        self.selected_mouse = None
def save(self):
    """Persist self._data when there are unsaved changes.

    Mirrors the sibling save() implementation elsewhere in this file.
    """
    if not self._dirty:
        return
    logger.debug("saving '%s': %s" % (self.id, self._data))
    get_users().save(self._data)
    # bug fix: clear the dirty flag after saving (the sibling save()
    # implementation does); otherwise every later call re-writes
    # unchanged data
    self._dirty = False