def start_mgmt(pyFaspMgmtPort=33500, pyFaspMgmtHost="localhost", recursionLevel=0):
    listener = ""
    if recursionLevel > 50:
        log.err("Error binding to port")
        raise ValueError("Could not bind to pyFaspMgmt port")
    try:
        pyFaspMgmtPort = conf.cfg.get("all", "mgmtPort")
        pyFaspMgmtHost = conf.cfg.get("all", "mgmtHost")
    except:
        pass
    try:
        listener = Listener(pyFaspMgmtHost, pyFaspMgmtPort, mgmt_connection)
    except:
        return start_mgmt(pyFaspMgmtPort + 1, pyFaspMgmtHost, recursionLevel + 1)
    log.log("Starting pyFaspMgmt git %s built %s on port %d" %
            (conf.VERSION, conf.DATE, pyFaspMgmtPort), "info")
    global faspMgmtPort, faspMgmtSock
    faspMgmtSock = listener
    faspMgmtPort = pyFaspMgmtPort
    while running:
        listener.accept()
def _remove(path):
    try:
        remove(path)
    except:
        err('failed attempting to remove %s\n' % _nice(path))
        raise
    return True
def read_conf():
    global conf, conf_update
    try:
        elapsed = (datetime.datetime.now() - conf_update).seconds
    except:
        elapsed = 100
    if (elapsed < 5):
        return
    conf_update = datetime.datetime.now()
    try:
        conf_dir = conf["global"]["conf"]
    except:
        conf_dir = "conf"
    cfg = {"global": {"projects": {}}}
    cfg = read_conf_dir(conf_dir, cfg)
    try:
        projects = next(os.walk(cfg["global"]["paths"]["projects"]))[1]
        for project in projects:
            project = os.path.join(cfg["global"]["paths"]["projects"], project)
            cfg = read_conf_dir(project, cfg)
    except:
        print err()
    conf = cfg
def savepid(location):
    if not location:
        return
    ownid = os.getpid()
    flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    mode = ((os.R_OK | os.W_OK) << 6) | (os.R_OK << 3) | os.R_OK
    try:
        fd = os.open(location, flags, mode)
    except OSError:
        err("PIDfile already exists, not updated %s" % location)
        return False
    try:
        f = os.fdopen(fd, 'w')
        line = "%d\n" % ownid
        f.write(line)
        f.close()
    except IOError:
        err("Can not create PIDfile %s" % location)
        return False
    log("Created PIDfile %s with value %d" % (location, ownid))
    atexit.register(removepid, location)
    return True
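# Hedged companion sketch (an assumption, not shown in the source): savepid()
# above registers removepid(location) with atexit, so on exit the PID file it
# created is presumably deleted. A minimal version might look like this:
import os

def removepid(location):
    try:
        os.remove(location)
    except OSError:
        # The PID file may already be gone; nothing left to clean up.
        pass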
def create_file(path, content=""): try: p = open(path, "wb") p.write(content) p.close() except: log.err("lock.create_file", path, traceback.format_exc())
def main(argv=None):
    # Makes ANSI color escapes work on Windows, and strips them when
    # stdout/stderr isn't a terminal
    colorama.init()

    if argv is None:
        argv = sys.argv[1:]
    args, unknown = parse_args(argv)

    for_stack_trace = 'run as "west -v ... {} ..." for a stack trace'.format(
        args.command)
    try:
        args.handler(args, unknown)
    except WestUpdated:
        # West has been automatically updated. Restart ourselves to run the
        # latest version, with the same arguments that we were given.
        os.execv(sys.executable, [sys.executable] + sys.argv)
    except KeyboardInterrupt:
        sys.exit(0)
    except CalledProcessError as cpe:
        log.err('command exited with status {}: {}'.format(
            cpe.args[0], quote_sh_list(cpe.args[1])))
        if args.verbose:
            raise
        else:
            log.inf(for_stack_trace)
    except CommandContextError as cce:
        log.die('command', args.command, 'cannot be run in this context:',
                *cce.args)
    except Exception as exc:
        log.err(*exc.args, fatal=True)
        if args.verbose:
            raise
        else:
            log.inf(for_stack_trace)
def load_text(self, string, font_size=None, font_path=None, color=None):
    if not font_path:
        assert os.path.isfile(share.GLOBAL_FONT_PATH)
    if not string:
        return
    try:
        self.clear()
        self.text = string
        for pimg in image_list_from_text(
                string,
                self.length_once,
                self.width,
                self.height,
                font_path,
                font_size,
                color):
            img = image.Image(show=False)
            img.set_image(image.image_from_auto_resize(pimg))
            self.image_list.append(img)
        self.start_index = 0
        self.end_index = len(self.image_list) - 1
        self.index_range = len(self.image_list)
        self.setup()
    except:
        log.err(repr(self), string, font_size, font_path, color,
                traceback.format_exc())
def post(self, project, file):
    '''upload a text/yaml configuration file to a project'''
    if (project != "project"):
        args = parsers.yaml_parser.parse_args()
        filecontent = args['yaml']
        if (allowed_conf_file(file)):
            try:
                test = config.ordered_load(filecontent)
            except:
                api.abort(400, {file: {"saved": "ko - " + err()}})
            try:
                pfile = os.path.join(
                    config.conf["global"]["projects"][project]["path"], file)
                with open(pfile, 'w') as f:
                    f.write(filecontent.encode("utf-8", 'ignore'))
                response = {file: {"saved": "ok"}}
                config.read_conf()
                response[file]["yaml_validator"] = config.conf[
                    "global"]["projects"][project]["files"][file]
                return response
            except:
                api.abort(400, {file: {"saved": "ko - " + err()}})
        else:
            api.abort(403)
    else:
        api.abort(403)
def send(self, command, timeout=50, rsp1=None, rsp2=None, debug=0):
    if not rsp1:
        rsp1 = self.prompt1
    if not rsp2:
        rsp2 = self.prompt2
    self.wait_quiet()
    if self.send_cr:
        log.stats['cmd'] += 1
        self.oai.sendline(command)
    else:
        self.oai.send(command)
    if self.expect_echo:
        cmd = self.oai.expect_exact([command, pexpect.TIMEOUT], timeout=timeout)
        if cmd != 0:
            raise log.err(self.failed(command, command, debug))
    if self.expect_response:
        index = self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
        if index == 0 or index == 1:
            return 'OK'
        elif index == 2:
            self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
            if self.flag_errors:
                raise log.err(self.err(command))
            else:
                return 'OK'
        else:
            raise log.err(self.failed(command, rsp1 + ' or ' + rsp2, debug))
def apply(self, **config):
    if config is None or len(config) < 1:
        raise ConfigError('No options provided')

    actions = {
        'theme': self.change_theme,
        'font': self.change_font,
        'size': self.change_font_size,
        'opacity': self.change_opacity,
        'padding': self.change_padding,
        'offset': self.change_font_offset,
        'list': self.list,
        'print': self.print,
    }

    errors_found = 0
    for param, action in actions.items():
        if param in config:
            try:
                action(config[param])
            except ConfigError as e:
                log.err(e)
                errors_found += 1
    if errors_found > 0:
        raise ConfigError(f'\n{errors_found} error(s) found')
def post(self, dataset, action):
    '''direct search into the dataset'''
    if ((action == "_search")):
        try:
            args = parsers.es_parser.parse_args()
            ds = Dataset(dataset)
            query = request.get_json()
            if (ds.connector.type == "elasticsearch"):
                try:
                    ds.select = {"query": {"function_score": {
                        "query": query["query"], "random_score": {}}}}
                except:
                    ds.select = query
                try:
                    size = args['size']
                except:
                    size = ds.chunk
                try:
                    # hack to speed up minimal rendering of the response object
                    resp = original_flask_make_response(json.dumps(
                        ds.connector.es.search(body=ds.select, index=ds.table,
                                               doc_type=ds.doc_type, size=size)))
                    resp.headers['Content-Type'] = 'application/json'
                    return resp
                except:
                    return api.abort(403, err())
            else:
                api.abort(403, "not an elasticsearch dataset")
        except:
            return {"status": "ko - " + err()}
    else:
        api.abort(403)
def post(self, dataset, id, action):
    '''elasticsearch update api proxy'''
    if ((action == "_update")):
        try:
            args = parsers.es_parser.parse_args()
            ds = Dataset(dataset)
            data = request.get_json()
        except:
            return {"status": "ko - " + err()}
        if (ds.connector.type == "elasticsearch"):
            try:
                # hack to speed up minimal rendering of the response object
                resp = original_flask_make_response(json.dumps(
                    ds.connector.es.update(index=ds.table, id=id, body=data)))
                resp.headers['Content-Type'] = 'application/json'
                return resp
            except:
                return api.abort(403, err())
        else:
            api.abort(403, "not an elasticsearch dataset")
    else:
        api.abort(403)
def layer_mouse_leave(self, layer_name):
    try:
        for w in self.layer_list[layer_name].get_children():
            if isinstance(w, button.Button):
                w.mouse_leave()
    except:
        log.err(repr(self), layer_name, traceback.format_exc())
def __call__(self, master, sock):
    meta = master.meta
    master.log("ChunkConnect,%s,%i ids:%s" %
               (str(self.csid), len(self.ids), str(self.ids)))
    master.drop_chunkserver(self.csid)
    master.chunkservers[self.csid] = (sock)
    for id in self.ids:
        added = []
        master.log("adding info for id %s" % id)
        for fi in meta.fileinfos.values():
            #master.log("checking file %s" % fi.fname)
            for ci in fi.chunkinfos:
                if ci.id != id:
                    continue
                master.log("adding chunkserver %s to file %s(%s)" %
                           (str(self.csid), fi.fname, ci.id))
                ci.servers.append(self.csid)
                added.append(fi.fname)
        if not len(added):
            # TODO: save this info for cleanup
            log.err("failed to find file for chunk id %s" % id)
        else:
            # okay for same chunk to be in multiple files
            #master.log('add chunk %s to file "%s"'%(id,str(added)))
            pass
def layer_active(self, layer_name):
    try:
        l = self.layer_list[layer_name]
        l.master.remove(l)
        l.master.put(l, l.real_x, l.real_y)
    except:
        log.err(repr(self), layer_name, traceback.format_exc())
def layer_clear(self, layer_name):
    try:
        widget_list = self.layer_list[layer_name].get_children()
        while widget_list:
            self.widget_del(widget_list.pop())
    except:
        log.err(repr(self), layer_name, traceback.format_exc())
def layer_move(self, layer_name, x, y):
    try:
        l = self.layer_list[layer_name]
        l.set_coord(x, y)
        l.master.move(l, l.real_x, l.real_y)
    except:
        log.err(repr(self), layer_name, traceback.format_exc())
def publish(self, name, servicetype="_http._tcp", TXT="", host="", port=1337,
            domain="local", af=avahi.PROTO_INET):
    self.name = name
    self.servicetype = servicetype
    self.TXT = TXT
    self.host = host
    self.port = port
    self.domain = domain
    try:
        bus = dbus.SystemBus()
        server = dbus.Interface(
            bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),
            avahi.DBUS_INTERFACE_SERVER)
        group = dbus.Interface(
            bus.get_object(avahi.DBUS_NAME, server.EntryGroupNew()),
            avahi.DBUS_INTERFACE_ENTRY_GROUP)
        group.AddService(avahi.IF_UNSPEC, af, dbus.UInt32(0), self.name,
                         self.servicetype, self.domain, self.host,
                         dbus.UInt16(self.port), self.TXT)
        group.Commit()
        self.group = group
    except DBusException as e:
        err('DBUS error %s' % e.get_dbus_message())
        return False
    info('Service %s published in %s domain' % (self.name, self.domain))
    return True
def getaddress(self, client_address):
    client_ip = client_address[0]
    lease = self.checklease(client_ip)
    if lease:
        return lease
    if (self.last_lease):
        sa = self.last_lease
    else:
        sa = self.config_json['start']
    ea = self.config_json['end']
    if (sa == ea):
        err('Address pool exhausted!')
        return None
    re_addr = re.compile('(\d*).(\d*).(\d*)')
    tipc_addr = re_addr.search(sa).groups()
    z = int(tipc_addr[0])
    c = int(tipc_addr[1])
    n = int(tipc_addr[2])
    if self.last_lease:
        n += 1
    sa = "%d.%d.%d" % (z, c, n)
    self.last_lease = sa
    lease = '{"clientip" : "%s", "tipcaddress" : "%s"}' % (client_ip, sa)
    self.lease_json['leases'].append(json.loads(lease))
    self.writelease()
    return json.loads(lease)
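# Hedged illustration (not from the source): the address-advancing arithmetic
# used by getaddress() above, isolated from the lease bookkeeping. It assumes
# pool addresses are dotted "z.c.n" triples (TIPC-style zone.cluster.node) and
# that only the last field is incremented, as the code above does.
import re

def next_tipc_address(addr):
    # Split the dotted triple into integers and bump the node field.
    z, c, n = (int(x) for x in re.search(r'(\d+)\.(\d+)\.(\d+)', addr).groups())
    return "%d.%d.%d" % (z, c, n + 1)

assert next_tipc_address("1.1.7") == "1.1.8"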
def load_text(self, string, font_size=None, font_path=None, color=None):
    if not font_path:
        assert os.path.isfile(share.GLOBAL_FONT_PATH)
    if not string:
        return
    try:
        self.clear()
        self.text = string
        for pimg in image_list_from_text(string, self.length_once, self.width,
                                         self.height, font_path, font_size,
                                         color):
            img = image.Image(show=False)
            img.set_image(image.image_from_auto_resize(pimg))
            self.image_list.append(img)
        self.start_index = 0
        self.end_index = len(self.image_list) - 1
        self.index_range = len(self.image_list)
        self.setup()
    except:
        log.err(repr(self), string, font_size, font_path, color,
                traceback.format_exc())
def get_data_list(self, siteId, country, interest, ref_type):
    log.info(' -- For interest ' + interest + ' , selection of ' +
             os.path.join(self.reference_data_path, country, siteId))
    # Depending on site and interest, select the needed data
    # Inputs:
    #   1. siteId
    #   2. country
    #   3. interest
    #   4. ref_type, ref_type = ['vector', 'raster', 'dem']
    reference_file_list = []
    # Access to xml file
    xmldoc = minidom.parse(self.description_file)
    refs = xmldoc.getElementsByTagName('ref')
    for ref in refs:
        s = (ref.getElementsByTagName('siteId'))[0].childNodes[0].data
        c = (ref.getElementsByTagName('country'))[0].childNodes[0].data
        ref_t = (ref.getElementsByTagName('type'))[0].childNodes[0].data
        ct = ref.getElementsByTagName(interest).length
        if ((siteId == s) and (country == c) and (ct > 0) and (ref_t == ref_type)):
            filename = (ref.getElementsByTagName('filename'))[0].childNodes[0].data
            output_file = os.path.join(self.reference_data_path, c, s, 'ROI', filename)
            if os.path.isfile(output_file):
                reference_file_list.append(output_file)
                log.info(' -- Check file presence => ok ')
                print '\n'
            else:
                log.err(' -- No ' + ref_type + ' reference file \n ')
    return reference_file_list
def build(f, any_errors, should_build, add_dep_to=None, delegate=None, re_do=True):
    if f.dolock():
        if f.check_deadlocks():
            err("%s: recursive dependency, breaking deadlock\n",
                f.printable_name())
            any_errors[0] += 1
            any_errors[1] += 1
        else:
            jwack.get_token(f)
            f.dolock().waitlock()
    if any_errors[0] and not vars.KEEP_GOING:
        return False
    f.refresh()
    debug3('think about building %r\n', f.name)
    dirty = should_build(f)
    while dirty and dirty != deps.DIRTY:
        # FIXME: bring back the old (targetname) notation in the output
        # when we need to do this.  And add comments.
        for t2 in dirty:
            if not build(t2, any_errors, should_build, delegate, re_do):
                return False
        jwack.wait_all()
        dirty = should_build(f)
    if dirty:
        job = BuildJob(f, any_errors, add_dep_to, delegate, re_do)
        add_dep_to = None
        job.schedule_job()
    else:
        f.dolock().unlock()
        if add_dep_to:
            f.refresh()
            add_dep_to.add_dep(f)
    return True
def send(self, command, sudo=False, timeout=50, rsp1=None, rsp2=None, debug=0):
    if not rsp1:
        rsp1 = self.prompt1
    if not rsp2:
        rsp2 = self.prompt2
    self.wait_quiet()
    if sudo == True:
        command = 'echo \'' + '' + '\' | sudo -S -E bash -c \' ' + command + '\' '
    if self.send_cr:
        log.stats['cmd'] += 1
        self.oai.sendline(command)
    else:
        self.oai.send(command)
    if self.expect_echo:
        #cmd = self.oai.expect([re.escape(command), pexpect.TIMEOUT], timeout=timeout)
        cmd = self.oai.expect_exact([command, pexpect.TIMEOUT], timeout=timeout)
        if cmd != 0:
            raise log.err(self.failed(command, command, debug))
    if self.expect_response:
        #index = self.oai.expect([re.escape(rsp1), re.escape(rsp2), '%', pexpect.TIMEOUT], timeout=timeout)
        index = self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
        if index == 0 or index == 1:
            return 'OK'
        elif index == 2:
            #self.oai.expect([re.escape(rsp1), re.escape(rsp2), pexpect.TIMEOUT], timeout=timeout)
            self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
            if self.flag_errors:
                raise log.err(self.err(command))
            else:
                return 'OK'
        else:
            raise log.err(self.failed(command, rsp1 + ' or ' + rsp2, debug))
def savepid(location):
    if not location:
        return
    ownid = os.getpid()
    flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    mode = ((os.R_OK | os.W_OK) << 6) | (os.R_OK << 3) | os.R_OK
    try:
        fd = os.open(location, flags, mode)
    except OSError:
        err("PIDfile already exists, not updated %s" % location)
        return False
    try:
        f = os.fdopen(fd, 'w')
        line = "%d\n" % ownid
        f.write(line)
        f.close()
    except IOError:
        err("Can not create PIDfile %s" % location)
        return False
    log("Created PIDfile %s with value %d" % (location, ownid))
    atexit.register(removepid, location)
    return True
def fork_exit():
    try:
        pid = os.fork()
        if pid > 0:
            os._exit(0)
    except OSError, e:
        err('Can not fork, errno %d : %s' % (e.errno, e.strerror))
def __call__(self, master, sock):
    meta = master.meta
    master.log("ChunkConnect,%s,%i ids:%s" %
               (str(self.csid), len(self.ids), str(self.ids)))
    master.drop_chunkserver(self.csid)
    master.chunkservers[self.csid] = (sock)
    for id in self.ids:
        added = []
        master.log("adding info for id %s" % id)
        for fi in meta.fileinfos.values():
            #master.log("checking file %s" % fi.fname)
            for ci in fi.chunkinfos:
                if ci.id != id:
                    continue
                master.log("adding chunkserver %s to file %s(%s)" %
                           (str(self.csid), fi.fname, ci.id))
                ci.servers.append(self.csid)
                added.append(fi.fname)
        if not len(added):
            # TODO: save this info for cleanup
            log.err("failed to find file for chunk id %s" % id)
        else:
            # okay for same chunk to be in multiple files
            #master.log('add chunk %s to file "%s"'%(id,str(added)))
            pass
def set_global_font_path(path):
    try:
        global GLOBAL_FONT_PATH
        assert os.path.isfile(path)
        GLOBAL_FONT_PATH = convert.get_str(path)
    except:
        log.err(traceback.format_exc())
def send(self, command, timeout=50, rsp1=None, rsp2=None, debug=0):
    if not rsp1:
        rsp1 = self.prompt1
    if not rsp2:
        rsp2 = self.prompt2
    self.wait_quiet()
    if self.send_cr:
        log.stats['cmd'] += 1
        self.oai.sendline(command)
    else:
        self.oai.send(command)
    if self.expect_echo:
        cmd = self.oai.expect_exact([command, pexpect.TIMEOUT], timeout=timeout)
        if cmd != 0:
            raise log.err(self.failed(command, command, debug))
    if self.expect_response:
        index = self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
        if index == 0 or index == 1:
            return 'OK'
        elif index == 2:
            self.oai.expect_exact([rsp1, rsp2, pexpect.TIMEOUT], timeout=timeout)
            if self.flag_errors:
                raise log.err(self.err(command))
            else:
                return 'OK'
        else:
            raise log.err(self.failed(command, rsp1 + ' or ' + rsp2, debug))
def route_upload(path):
    global chunked_file_handle
    time = request.args.get('time', type=float, default=0.0)
    force = request.args.get('force', type=inputs.boolean, default=False)
    path = safe_join(os.path.join(app.config['fileroot'], path))
    if path is None:
        abort(404)

    try:
        content_range = request.environ['HTTP_CONTENT_RANGE']
        parsed_ranges = re.search(r'bytes (\d*)-(\d*)\/(\d*)', content_range)
        _from, _to, _size = [int(x) for x in parsed_ranges.groups()]
        deb(f'chunked upload, {_from} to {_to} ({_size}), {_to - _from + 1} bytes')
    except:
        content_range = None

    if not content_range or _from == 0:
        if os.path.exists(path):
            if not force:
                # if force was not given then the default is that the server
                # refuses to rewrite an existing file
                err(f'file {path} already exists, returning 403 (see --force)')
                return '', 403
        else:
            directory = os.path.dirname(path)
            if not os.path.exists(directory):
                inf(f'constructing new path {directory}')
                Path(directory).mkdir(parents=True, exist_ok=True)

    if content_range:
        if _from == 0:
            try:
                if chunked_file_handle.get(path):
                    err('internal error in upload, non closed filehandle')
                    chunked_file_handle[path].close()
                open(path, 'w').close()
                chunked_file_handle[path] = open(path, "ab")
            except:
                pass
            inf(f'writing file "{path}" ({human_file_size(_size)})')
        chunked_file_handle[path].write(request.data)
        if _to == _size - 1:
            inf(f'{path} transfer complete')
            chunked_file_handle[path].close()
            del chunked_file_handle[path]
    else:
        # ordinary non-chunked upload, single write
        inf(f'writing file "{path}"')
        with open(path, "wb") as fp:
            fp.write(request.data)

    if time > 0.0:
        deb(f'setting {path} time to {time}')
        os.utime(path, (time, time))

    # 201: Created
    return '', 201
def fork_exit():
    try:
        pid = os.fork()
        if pid > 0:
            os._exit(0)
    except OSError, e:
        err('Can not fork, errno %d : %s' % (e.errno, e.strerror))
def _flush(self, buf):
    if self.flushcmd == "redo" and self.recursive:
        f = buf[:-1]
        olddepth = vars.DEPTH
        vars.DEPTH = vars.DEPTH + ' '
        try:
            main([os.path.join(self.target.dirname(), f)])
        finally:
            vars.DEPTH = olddepth
        self.doing = buf
    elif self.flushcmd == "redo_done" and self.doing == buf:
        self.doing = None
    elif self.flushcmd == "std" and self.doing == None:
        sys.stdout.write(buf)
        sys.stdout.flush()
    elif self.flushcmd == "err" and self.doing == None:
        sys.stderr.write(buf)
        sys.stderr.flush()
    elif self.flushcmd == "log" and not self.recursive:
        LOGFILE.write(buf)
        LOGFILE.flush()
    elif self.flushcmd == "redo_err" and self.recursive:
        err(" " + buf)
    elif self.flushcmd == "redo_warn" and self.recursive:
        warn(" " + buf)
def find_files_with_tag(session, matchbox, paths):
    """
    Look for tags in the data at each file path.

    This is the main routine for scanning multiple sources for embedded
    issues. It scans all the paths and resolves any directory to source
    files. Then each source is read, and another routine is called to scan
    for and process tagged comments.
    """
    for source in sources:
        try:
            tag_generator = find_tagged_comments(session, matchbox, source, data)
        except Exception, e:
            log.err("Find: %s", e)
            traceback.print_exc()
            tag_generator = None
        while tag_generator:
            try:
                tag = tag_generator.next()
                yield tag
            except StopIteration, e:
                tag_generator = None
            except Exception, e:
                log.err("Find: %s", e)
                traceback.print_exc()
def build(self):
    debug3('running build job for %r\n', self.target.name)
    (dodir, dofile, basedir, basename, ext) = (
        self.dodir, self.dofile, self.dobasedir, self.dobasename, self.doext)
    # this will run in the dofile's directory, so use only basenames here
    if vars.OLD_ARGS:
        arg1 = basename        # target name (no extension)
        arg2 = ext             # extension (if any), including leading dot
    else:
        arg1 = basename + ext  # target name (including extension)
        arg2 = basename        # target name (without extension)
    argv = ['sh', '-e', dofile,
            arg1, arg2,
            # temp output file name
            os.path.relpath(self.tmpname_arg3, dodir),
            ]
    if vars.VERBOSE:
        argv[1] += 'v'
    if vars.XTRACE:
        argv[1] += 'x'
    if vars.VERBOSE or vars.XTRACE:
        log_e('\n')
    firstline = open(os.path.join(dodir, dofile)).readline().strip()
    if firstline.startswith('#!.../'):
        _, _, interp_argv = firstline.partition("/")
        interp_argv = interp_argv.split(' ')
        interpreter = _find_interpreter(self.dodir, interp_argv[0])
        if not interpreter:
            err('%s unable to find interpreter %s.\n',
                self.dofile, interp_argv[0])
            os._exit(208)
        self.target.add_dep(state.File(interpreter))
        argv[0:2] = [interpreter] + interp_argv[1:]
    elif firstline.startswith('#!/'):
        argv[0:2] = firstline[2:].split(' ')
    log('%s\n', self.target.printable_name())
    log_cmd("redo", self.target.name + "\n")
    try:
        dn = dodir
        os.environ['REDO_PWD'] = os.path.join(vars.PWD, dn)
        os.environ['REDO_TARGET'] = basename + ext
        os.environ['REDO_DEPTH'] = vars.DEPTH + ' '
        if dn:
            os.chdir(dn)
        l = logger.Logger(self.log_fd, self.tmp_sout_fd)
        l.fork()
        os.close(self.tmp_sout_fd)
        close_on_exec(1, False)
        if vars.VERBOSE or vars.XTRACE:
            log_e('* %s\n' % ' '.join(argv))
        os.execvp(argv[0], argv)
    except:
        import traceback
        sys.stderr.write(traceback.format_exc())
        err('internal exception - see above\n')
        raise
    finally:
        # returns only if there's an exception (exec in other case)
        os._exit(127)
def main():
    try:
        alacritty = Alacritty()
        alacritty.apply(**args())
        alacritty.save()
    except ConfigError as e:
        log.err(e)
        exit(1)
def set_coord(self, x, y):
    try:
        self.x = int(x)
        self.y = int(y)
        self.real_x = int(x * share.WIDTH_ZOOM_SCALE)
        self.real_y = int(y * share.HEIGHT_ZOOM_SCALE)
    except:
        log.err(repr(self), x, y, traceback.format_exc())
def cosm_send(id, value):
    '''Send data to Cosm.com'''
    try:
        cosm.put_data_point(id, value)
    except urllib2.HTTPError as e:
        err("Cosm: " + str(e))
    except urllib2.URLError as e:
        err("Cosm: " + str(e))
def _print_cycle(target_list, t):
    n = len(target_list)
    for i in xrange(0, n):
        if target_list[i] == t:
            break
    for j in xrange(i, n):
        err(' %s\n' % target_list[j])
    err(' %s\n' % t)
def getSnsUserInfo(token, openid):
    url = ('https://api.weixin.qq.com/sns/userinfo?access_token=%s&openid=%s&lang=zh_CN'
           % (token, openid))
    res = json.loads(urllib.urlopen(url).read())
    if res.has_key('errcode'):
        log.err("Wx get userinfo error: %s", json.dumps(res))
        return None
    else:
        return res
def find_device(self):
    for port in list_ports.comports():
        if self.name in port.description:
            return port.device
    log.err(self.name + ' is not connected')
    self.list_ports()
    exit(1)
def set_coord(self, x, y):
    try:
        self.x = int(x)
        self.y = int(y)
        self.real_x = int(x * share.WIDTH_ZOOM_SCALE)
        self.real_y = int(y * share.HEIGHT_ZOOM_SCALE)
    except:
        log.err(repr(self), x, y, traceback.format_exc())
def find_oscilloscope(self):
    """ find the port the oscilloscope is connected to """
    for dev in list_ports.comports():
        if dev.description == self.name:
            return dev.device
    log.err(self.name + ' is not connected')
    exit(1)
def send_byte(self, byte):
    try:
        self.ser.write(bytes((byte,)))
    except serial.SerialException:
        log.err('the device was disconnected')
        os.system('killall ser-term')
    time.sleep(0.003)
def search_func():
    msg = """
What do you want to search by:
\t1. Search by Name
\t2. Search by Address
\t3. Search by City
\t4. Search by Vendor
Enter the number you want to search by: """
    # use check_input function to get "bywhat" we will query the database
    bywhat = check_input(1, 4, msg)
    if bywhat == 1:
        bywhat = "Name"
    elif bywhat == 2:
        bywhat = "Address"
    elif bywhat == 3:
        bywhat = "City"
    elif bywhat == 4:
        bywhat = "Vendor"
    # get "what" the user wants to find
    what = raw_input("Search by %s: " % bywhat)
    # build query string to pass to the database query function
    q = ("SELECT * FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30"
         % (bywhat, what))
    # build query string to pass to the database query function,
    # but only for the Mgmt IP column
    qIP = ("SELECT `Mgmt IP` FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30"
           % (bywhat, what))
    # store database query result in "search" variable to later iterate
    search = db.query(str(q))
    # this is just to enumerate the results
    e = 0
    for entry in search:
        print "----------------------------------------------------"
        e += 1
        print "%d. " % e,
        print "%s" % entry['Name'],
        print "\n\tAddress: %s" % entry['Address'],
        print "\n\tVendor: %s" % entry['Vendor'],
        print "\n\tCity: %s" % entry['City'],
        print "\n\tMgmt IP: %s" % entry['Mgmt IP'],
        # for each iteration, we will use subprocess to ping the 'Mgmt IP' element
        status = subprocess.call(
            ['ping', '-c1', '-W10', '-w2', entry['Mgmt IP']],
            stdout=open(os.devnull, 'wb'))
        if status == 0:
            print "is",
            # this is to colorize the word UP
            log.infog("UP")
        else:
            print "is",
            # this is to colorize the word DOWN
            log.err("DOWN")
        print "----------------------------------------------------"
    # store the database query for the Mgmt IP column only in a variable called "search_IP"
    search_IP = list(db.query(str(qIP)))
    # return this variable to later use it to find out user's desired IP to connect to
    return search_IP
def layer_del(self, layer_name):
    try:
        l = self.layer_list[layer_name]
        l.master.remove(l)
        l.master = None
        l.master_name = ""
        self.layer_list.pop(layer_name)
    except:
        log.err(repr(self), layer_name, traceback.format_exc())
def get_stats(self):
    url = f'{self.server}/stats'
    deb(f'getting stats at {url}')
    response = requests.get(url, verify=self.certificate)
    if response.status_code != 200:
        err(f'got {response.status_code} from {self.domain} server at /stats')
        return ErrorCode.SERVER_ERROR, []
    return ErrorCode.OK, json.loads(response.text)
def mgmt_connection(sock):
    sock.settimeout(.1)
    notifications = []
    uid = ""
    sessionActive = True
    closeMgmt = False
    central = []
    while 1:
        msg = {}
        if (notifications):
            if 'FASP' not in notifications[0]:
                log.err("BAD notif: %s" % notifications[0])
                notifications = notifications[1:]
                continue
            for i in range(1, len(notifications)):
                try:
                    line = notifications[i].strip()
                except:
                    continue
                if line:
                    try:
                        k, v = line.split(": ", 1)
                    except:
                        log.dbg("Bad line: %s" % line)
                        continue
                    msg[k] = v
                else:
                    fm2 = "%s\n" % "\n".join(notifications)
                    #print "****************\n%s************" % fm2
                    try:
                        central = centralConnect()
                    except Exception, e:
                        pass
                    for i in central:
                        try:
                            i.sendall(fm2)
                        except Exception, e:
                            pass
                    #notifications=notifications[i+1:]
                    log.dbg("Got message from ASCP %s" % pprint.pformat(msg))
                    try:
                        activeTransfers[msg['UserStr']][0].put(msg)
                    except:
                        try:
                            # Create two queues, one for messages received from fasp mgmt,
                            # the other for messages to send to fasp mgmt.
                            activeTransfers[msg['UserStr']] = (Queue.Queue(), Queue.Queue())
                            activeTransfers[msg['UserStr']][0].put(msg)
                            uid = msg['UserStr']
                        except Exception, e:
                            log.info("No UserId specified for transfer, closing FaspMgmt.")
                            sessionActive = False
                            continue
        # Parsing notifications until all notifications consumed.
def prepare(self):
    assert self.target.dolock().owned == state.LOCK_EX
    self.target.build_starting()
    self.before_t = _try_stat(self.target.name)

    newstamp = self.target.read_stamp()
    if newstamp.is_override_or_missing(self.target):
        if newstamp.is_missing():
            # was marked generated, but is now deleted
            debug3('oldstamp=%r newstamp=%r\n', self.target.stamp, newstamp)
            self.target.forget()
            self.target.refresh()
        elif vars.OVERWRITE:
            warn('%s: you modified it; overwrite\n',
                 self.target.printable_name())
        else:
            warn('%s: you modified it; skipping\n',
                 self.target.printable_name())
            return 0
    if self.target.exists_not_dir() and not self.target.is_generated:
        # an existing source file that was not generated by us.
        # This step is mentioned by djb in his notes.
        # For example, a rule called default.c.do could be used to try
        # to produce hello.c, but we don't want that to happen if
        # hello.c was created in advance by the end user.
        if vars.OVERWRITE:
            warn('%s: exists and not marked as generated; overwrite.\n',
                 self.target.printable_name())
        else:
            warn('%s: exists and not marked as generated; not redoing.\n',
                 self.target.printable_name())
            debug2('-- static (%r)\n', self.target.name)
            return 0
    (self.dodir, self.dofile, self.dobasedir, self.dobasename,
     self.doext) = _find_do_file(self.target)
    if not self.dofile:
        if newstamp.is_missing():
            err('no rule to make %r\n', self.target.name)
            return 1
        else:
            self.target.forget()
            debug2('-- forget (%r)\n', self.target.name)
            return 0  # no longer a generated target, but exists, so ok
    self.outdir = self._mkoutdir()
    # name connected to stdout
    self.tmpname_sout = self.target.tmpfilename('out.tmp')
    # name provided as $3
    self.tmpname_arg3 = os.path.join(self.outdir, self.target.basename())
    # name for the log file
    unlink(self.tmpname_sout)
    unlink(self.tmpname_arg3)
    self.log_fd = logger.open_log(self.target, truncate=True)
    self.tmp_sout_fd = os.open(self.tmpname_sout,
                               os.O_CREAT | os.O_RDWR | os.O_EXCL, 0666)
    close_on_exec(self.tmp_sout_fd, True)
    self.tmp_sout_f = os.fdopen(self.tmp_sout_fd, 'w+')
    return None
def parse(self):
    self.entries = {}
    try:
        f = open('/etc/network/interfaces')
        ss = f.read().splitlines()
        f.close()
    except IOError, e:
        log.err('NetworkPlugin', str(e))
        return
def check_lock():
    if not os.path.exists(share.LOCK_PATH):
        create_file(share.LOCK_PATH)
    if share.MAIN_WINDOW and os.path.exists(share.RESTORE_PATH):
        remove_file(share.RESTORE_PATH)
        try:
            share.MAIN_WINDOW.present()
        except:
            log.err("lock.check_lock", traceback.format_exc())
        return True
def draw_out_line(self, color="#FFF", width=1):
    try:
        self.set_image(
            image_from_draw_outline(
                self.get_image(),
                color,
                width,
            )
        )
    except:
        log.err(repr(self), color, width, traceback.format_exc())
def widget_active(self, w):
    try:
        if isinstance(w, effect.Effect):
            for i in w.image_list:
                i.layer.remove(i)
                i.layer.put(i, i.real_x, i.real_y)
        else:
            w.layer.remove(w)
            w.layer.put(w, w.real_x, w.real_y)
    except:
        log.err(repr(self), w, traceback.format_exc())
def main_redo_targets(redo_flavour, targets):
    import state
    from log import err
    if len(targets) != 0:
        err('%s: no arguments expected.\n', redo_flavour)
        return 1
    for f in state.files():
        if f.is_generated and f.exists():
            print f.name