def write_object(self, object_, flags):
    out = Writer.write_object(self, object_, flags)
    if flags == FLAG_HPP:
        out[flags] += self.build_type_str(object_) + ';\n'
    if flags == FLAG_CPP:
        if object_.is_static:
            if object_.initial_value is None and self._current_class.type != 'enum':
                Error.exit(Error.STATIS_MEMBER_SHOULD_HAVE_INITIALISATION,
                           self._current_class.name, object_.name)
            if self._current_class.type == 'enum':
                pattern = '{4}{0} {2}::{1}'
            else:
                pattern = '{4}{0} {2}::{1} = {3}'
            pattern += ';\n'
            modifier = ''
            if object_.is_const:
                modifier = 'const '
            out[flags] += pattern.format(convert_type(object_.type), object_.name,
                                         self._current_class.name,
                                         object_.initial_value, modifier)
    return out
def write_function(self, cls, function):
    global functions_cache
    if cls.name not in functions_cache:
        functions_cache[cls.name] = []
    if function.name in functions_cache[cls.name]:
        Error.warning(Error.DUBLICATE_METHODS, cls.name, function.name)
        return ''
    functions_cache[cls.name].append(function.name)
    convert = function.name not in allowed_functions and self.current_class.name != 'DataStorage'
    out = '''function {0}({1}) __begin__ {2} __end__\n'''
    if function.is_static:
        out = 'public static ' + out
    else:
        out = 'public ' + out
    name = function.name
    args = ', '.join(['$' + x[0] for x in function.args])
    ops = '\n'.join(function.operations)
    if convert:
        ops = convert_function_to_php(ops, self.parser, args)
    out = out.format(name, args, ops)
    return out
def execute(self, event=None):
    Action.execute(self)
    # making sure the filename isn't "" and that it doesn't contain commas
    if len(self.filename) == 0:
        _ = Error("File must have a name")
        return
    if "," in self.filename:
        _ = Error("Cannot use commas in filename when saving")
        return
    # saving items into the file
    fullfilename = os.getcwd() + '/' + self.filename + '.txt'
    self.filename = fullfilename
    f = open(os.path.expanduser(self.filename), "w+")
    for i, subj in enumerate(self.ScheduleList, 1):
        f.write(str(i) + "\t")
        lectext = subj.cd + ',' + subj.dy + ',' + subj.stH + ',' + subj.stM + ',' + subj.enH + ',' + subj.enM
        f.write(lectext)
        f.write("\t")
        if subj.SubObj.link is not None:
            tuttext = subj.dy1 + ',' + subj.stH1 + ',' + subj.stM1 + ',' + subj.enH1 + ',' + subj.enM1
            f.write(tuttext)
        else:
            f.write("None")
        f.write("\n")
    f.close()
    self.filename = ""
    self.wind.destroy()
def _initNet(self):
    """ Initialize the network interface configuration """
    # get_if_conf()    -- fetch the interface configuration automatically
    # get_if_conf_ff() -- fetch the interface configuration from the config file
    #                     (/etc/etherwall/etherwall.conf)
    # get_if_conf_ff() returns 2 values:
    #   1. get_if_conf_ff()[0] is the return message code number:
    #        0 - no error
    #        1 - bad parsing
    #        2 - device not found
    #        3 - incomplete configuration format
    #   2. get_if_conf_ff()[1] is the return message
    if get_if_conf_ff()[0] == 0:
        # etherwall is running with a manually specified configuration
        if get_if_conf_ff()[1]['manual'] == "yes":
            self.iface = get_if_conf_ff()[1]['iface']
            self.logger.info("Listening on %s..." % (self.iface))
            self.mymac = get_if_conf_ff()[1]['hwaddr'].lower()
            self.myip = get_if_conf_ff()[1]['ipaddr']
            self.gw = get_if_conf_ff()[1]['gwaddr']
            self.gwmac = get_if_conf_ff()[1]['gwhwaddr'].lower()
            self.cidr = get_if_conf_ff()[1]['cidr']
            self.promisc = get_if_conf_ff()[1]['promisc']
            self.msgbox = get_if_conf_ff()[1]['msgbox']
            if not check_if_up(iface=self.iface):
                # ERROR: interface is down
                Error("'Etherwall - Daemon Stopped'", "'Interface: %s is down.'" % (self.iface), self.logger)
        else:
            # etherwall is running with automatic configuration detection
            if get_if_conf():
                self.iface = get_if_conf()[0]
                self.logger.info("Listening on %s..." % (self.iface))
                self.mymac = get_if_conf()[1]
                self.myip = get_if_conf()[2]
                self.gw = get_if_conf()[3]
                self.cidr = get_if_conf()[5]
                self.gwmac = self.getGwMac()
                self.promisc = get_if_conf_ff()[1]['promisc']
                self.msgbox = get_if_conf_ff()[1]['msgbox']
            else:
                # ERROR: no device up or no IPv4 address assigned
                Error("'Etherwall - Daemon Stopped'", "'Interface: No Device Up or No IPv4 address assigned.'", self.logger)
    elif get_if_conf_ff()[0] == 1:
        # ERROR: bad parsing
        Error("'Etherwall - Daemon Stopped'", "'%s.'" % (get_if_conf_ff()[1]), self.logger)
    elif get_if_conf_ff()[0] == 2:
        # ERROR: device not found
        Error("'Etherwall - Daemon Stopped'", "'%s.'" % (get_if_conf_ff()[1]), self.logger)
        alert.start()
        time.sleep(3)
    elif get_if_conf_ff()[0] == 3:
        # ERROR: incomplete configuration format
        Error("'Etherwall - Daemon Stopped'", "'%s.'" % (get_if_conf_ff()[1]), self.logger)
def _build_serialize_operation(self, obj_name, obj_type, obj_value, obj_is_pointer,
                               obj_template_args, serialization_type, is_link=False):
    index = 0
    if obj_value is None:
        index = 1
    type_ = obj_type
    if self.parser.find_class(type_) and self.parser.find_class(type_).type == 'enum':
        string = self._build_serialize_operation_enum(
            obj_name, obj_type, obj_value, obj_is_pointer, obj_template_args, serialization_type)
    else:
        if obj_type not in self.simple_types and type_ != 'list' and type_ != 'map':
            if is_link:
                type_ = 'link'
            elif obj_is_pointer:
                type_ = 'pointer'
            else:
                type_ = 'serialized'
        template_args = list()
        if len(obj_template_args) > 0:
            if type_ == 'map':
                if len(obj_template_args) != 2:
                    Error.exit(Error.MAP_TWO_ARGS, self._current_class.name, obj_name)
                if serialization_type == SERIALIZATION:
                    return self.build_map_serialization(
                        obj_name, obj_type, obj_value, obj_is_pointer, obj_template_args)
                if serialization_type == DESERIALIZATION:
                    return self.build_map_deserialization(
                        obj_name, obj_type, obj_value, obj_is_pointer, obj_template_args)
            else:
                arg = obj_template_args[0]
                arg_type = arg.name if isinstance(arg, Class) else arg.type
                template_args.append(convert_type(arg_type))
                if arg.is_link:
                    type_ = 'list<link>'
                elif arg_type in self.simple_types:
                    type_ = '{0}<{1}>'.format(type_, arg_type)
                elif arg.is_pointer:
                    type_ = 'pointer_list'
                else:
                    type_ = '{0}<serialized>'.format(type_)
        if type_ not in self.serialize_protocol[serialization_type]:
            Error.exit(Error.UNKNOWN_SERIALISED_TYPE, type_, obj_type)
        pattern = self.serialize_protocol[serialization_type][type_][index]
        string = pattern.format(obj_name, convert_type(obj_type), obj_value, '{', '}', *template_args)
    return string
def p_error(p):
    if p:
        print("Error sintáctico en el token =", p.type, 'L:', p.lineno)
        # Just discard the token and tell the parser it's okay.
        error = Error('Error en el token ' + str(p.type), str(p.lineno), 0)
        parser.errok()
    else:
        error = Error('Error al final del archivo', 0, 0)
    syntacticErrors.add(error)
def execute(self, parent, enviroment):
    # A CREATE_TABLE node is always received here.
    # 1. Walk and execute each child node.
    # 2. If the loop finishes, there were no errors.
    # 3. Check that a database is selected; if so, create the table.
    # 4. Check that the table does not already exist in the selected database.
    for hijo in parent.hijos:
        if hijo.nombreNodo == "IDENTIFICADOR":
            self.name = hijo.valor.upper()
        elif hijo.nombreNodo == "ATRIBUTO_COLUMNA":
            nuevaColumna = Column()
            resp = nuevaColumna.crearColumna(hijo, self.checkers, self.listaids)
            if resp.code != "00000":
                return resp
            self.columnas.append(nuevaColumna)
        elif hijo.nombreNodo == "ATRIBUTO_PRIMARY_KEY":
            nuevaConstraint = Constraint()
            resp = nuevaConstraint.crearConstraintPrimary(hijo, self.columnas)
            if resp.code != "00000":
                return resp
        elif hijo.nombreNodo == "ATRIBUTO_REFERENCES":
            nuevaConstraint = Constraint()
            resp = nuevaConstraint.crearConstraintReferences(hijo, self.columnas)
            if resp.code != "00000":
                return resp
            self.listaReferences.append(nuevaConstraint)
        elif hijo.nombreNodo == "OPCIONALES_ATRIBUTO_CHECK":
            nuevaConstraint = Constraint()
            resp = nuevaConstraint.crearConstraintCheck(hijo, self.listaids, self.columnas, self.checkers)
            if resp.code != "00000":
                return resp
    with open('src/Config/Config.json') as file:
        config = json.load(file)
    if config['databaseIndex'] is None:
        err_resp = Error(3, "No se ha seleccionado ninguna base de datos.", -1)
        resp = Response("42P12", err_resp)
        return resp
    listTables = showTables(config['databaseIndex'].upper())
    if self.name.upper() in listTables:
        err_resp = Error(3, "Ya existe la tabla.", -1)
        resp = Response("42P12", err_resp)
        return resp
    createTable(config['databaseIndex'].upper(), self.name.upper(), len(self.columnas))
    tc.createTable(config['databaseIndex'].upper(), self.name.upper(), self.columnas)
    resp = Response("00000", "Se creó la tabla")
    return resp
def execute(self, parent, enviroment):
    # Get the table, fields, and values from the AST.
    for hijo in parent.hijos:
        if "TABLE" == hijo.nombreNodo:
            self.name = hijo.valor
        elif "ADD" == hijo.nombreNodo:
            nuevaCol = Column()
            self.addCol = 1
            for h in hijo.hijos:
                if "COLUMN" == h.nombreNodo:
                    nuevaCol.name = h.valor
                else:
                    print('none ', h.nombreNodo)
                    nuevaCol.type = nuevaCol.asignarTipoCol(h.nombreNodo.upper())
            self.columnas.append(nuevaCol)
    # Current database info
    with open('src/Config/Config.json') as file:
        config = json.load(file)
    if config['databaseIndex'] is None:
        err_resp = Error(3, "No se ha seleccionado ninguna base de datos.", -1)
        resp = Response("42P12", err_resp)
        return resp
    listTables = showTables(config['databaseIndex'].upper())
    useDB = config['databaseIndex'].upper()
    if self.name.upper() not in listTables:
        err_resp = Error(3, "No existe la tabla en la DB.", -1)
        resp = Response("42P12", err_resp)
        return resp
    # Info to insert
    print('DB ACTUAL', useDB)
    print('Tabla', self.name)
    # Validations
    if self.addCol == 1:
        # Insert the new column
        res = alterAddColumn(useDB, self.name.upper(), 1)
        res = 0
        print('insert col :', res)
        if res == 0:
            for columna in self.columnas:
                tc.CreateColumn(useDB, self.name.upper(), columna)
            resp = Response("00000", "Se agrego la columna a la tabla " + self.name.upper())
        else:
            resp = Response("42P12", "No se agrego la columna a la tabla " + self.name.upper())
        self.addCol = 0
    return resp
def _startEtherWall(self):
    """ Starting Etherwall """
    # adding the router/gateway to the ARP cache table as a static ARP entry
    self.logger.info("Adding static entry for the Gateway: %s %s" % (self.gw, self.gwmac))
    if os.system("arp -s %s %s" % (self.gw, self.gwmac)):
        # ERROR: couldn't add the static entry for the gateway
        Error("'Etherwall - Daemon Stopped'", "'Couldn't add the static entry for the Gateway.'", self.logger)

    # append the gateway & another subnet/segment to the chain
    app_gw_to_chain(gw=self.gw, mac=self.gwmac)
    app_another_subnet("%s/%s" % (self.myip, self.cidr))

    # adding the specified hosts to the ARP cache table as static ARP entries
    # and appending each host to the chain.
    self.allow_host = {}
    if imp_allow_host()[0] == 0:
        if imp_allow_host()[1]:
            for host in imp_allow_host()[1]:
                if (host.split()[1] == ("%s" % (self.mymac)) or host.split()[1] == ("%s" % (self.gwmac))):
                    self.logger.critical("Forbidden: '%s': The MAC address same with your MAC & Gateway" % (host))
                else:
                    self.logger.info("Adding static entry for the Host: %s" % (host))
                    if os.system("arp -s %s" % (host)):
                        self.logger.critical("Couldn't add the static entry, '%s' outside of subnet" % (host.split()[0]))
                    else:
                        # append host to chain
                        app_host_to_chain(ip=host.split()[0], mac=host.split()[1])
                        self.allow_host['%s' % (host.split()[0])] = "%s" % (host.split()[1])
    else:
        # ERROR: bad parsing
        Error("'Etherwall - Daemon Stopped'", "'%s.'" % (imp_allow_host()[1]), self.logger)

    # sniffing mode
    if self.promisc == "no":
        self.logger.info("Device %s left promiscuous mode..." % (self.iface))
        scapy.all.conf.sniff_promisc = 0
    else:
        self.logger.info("Device %s entered promiscuous mode..." % (self.iface))

    # starting realtime protection
    try:
        self.logger.info("Starting Realtime Protection...")
        arpmon = ArpMon(myip=self.myip, mymac=self.mymac, gw=self.gw, gwmac=self.gwmac,
                        iface=self.iface, cidr=self.cidr, logger=self.logger,
                        allow_host=self.allow_host, msgbox=self.msgbox)
        arpmon._startArpMon()
    except:
        if not check_if_up(iface=self.iface):
            # ERROR: the interface went down
            Error("'Etherwall - Daemon Stopped'", "'Interface: The interface %s went down.'" % (self.iface), self.logger)
        else:
            # ERROR: unknown/unexpected error
            Error("'Etherwall - Daemon Stopped'", "'Unexpected Error: %s'" % (sys.exc_info()[0]), self.logger)
def groups(self):
    while True:
        next_token = self.__tokens.seek()
        if next_token.typ == 'BEGIN':
            self.group()
        elif next_token.typ == 'EOF':
            print('[Finished parsing %s]' % self.__filename)
            break
        else:
            Error.print_error(Error.message(
                self.__filename, next_token.line, next_token.column,
                'not separator: %s' % next_token.value))
            break
def reg_sighandler(self, debug: bool = False, buf: Type.BufT = Type.BufT.DEBUG) -> None:
    if debug:
        Printer.Printer.inst().buf(Printer.Printer.inst().f_title('signal handler info'), buf)
        Printer.Printer.inst().buf('@target: ', buf, indent=2)
        for i in range(len(self.__sig_handler)):
            Printer.Printer.inst().buf(
                f'[{i}] {self.__sig_handler[i].brief}({self.__sig_handler[i].sig})', buf, indent=4)
        Printer.Printer.inst().buf_newline(buf)
        Printer.Printer.inst().buf(Printer.Printer.inst().f_title('registering signal handler'), buf)
        for handler in self.__sig_handler:
            Printer.Printer.inst().buf(
                Printer.Printer.inst().f_prog(f'Registering {handler.brief} handler'), buf, False, 2)
            try:
                signal.signal(handler.sig, handler.handler)
            except OSError as os_err:
                Printer.Printer.inst().buf(Printer.Printer.inst().f_col('fail', Type.Col.RED), buf)
                raise Error.SysErr(Type.SysErrT.REG_FAIL, sig=handler.brief, err_str=os_err.strerror)
            else:
                Printer.Printer.inst().buf(Printer.Printer.inst().f_col('done', Type.Col.BLUE), buf)
                Printer.Printer.inst().buf(f'@handler: {handler.handler}', buf, indent=4)
        Printer.Printer.inst().buf_newline(buf)
    else:
        for handler in self.__sig_handler:
            try:
                signal.signal(handler.sig, handler.handler)
            except OSError as os_err:
                raise Error.SysErr(Type.SysErrT.REG_FAIL, sig=handler.brief, err_str=os_err.strerror)
def _create_class(self, text):
    body, header, text = find_body(text)
    cls = Class()
    cls.parse(header)
    if not self.is_side(cls.side):
        if cls.is_storage:
            self.classes_for_data.append(cls)
        return text
    cls.parse_body(Parser(self.side), body)
    if self.find_class(cls.name):
        Error.exit(Error.DUBLICATE_CLASS, cls.name)
    self.classes.append(cls)
    return text
def group(self):
    token = self.__tokens.next()
    snippet_types = {
        '---constant---',
        '---class-method---',
        '---instance-method---',
        '---private-method---',
        '---define-method---'
    }
    if token.value in snippet_types:
        self.snips(token.value)
    elif token.value == '---EOF---':
        print('eof')
    else:
        Error.print_error(Error.message(
            self.__filename, token.line, token.column,
            'invalid separator: %s' % token.value))
def snip(self, snippet_type, tag=''):
    token = self.__tokens.next()
    if token.typ == 'SNIPPET':
        print('snippet: (%s) %s %s' % (snippet_type, token.value, tag))
        self.__make_file(
            # filename=('%s.sublime-snippet' % (token.value)),
            # snippet_type=snippet_type,
            # value=token.value,
            # tag=tag,
            Snippet(token.value, snippet_type, token.value, tag)
        )
    else:
        Error.print_error(Error.message(
            self.__filename, token.line, token.column,
            'not snippet: %s' % token.value))
def __getitem__(self, key):
    retValue = None
    error = None
    if key is not None:
        dynamickey = key
        if '.' in key:
            dynamickey = key.split('.')[0]
        if dynamickey in self.properties:
            values = self.properties[dynamickey]
            if not isinstance(values, list):
                values = [values]
            if dynamickey != key:
                dynamickeyattributes = key.split('.')[1:]
                for attribute in dynamickeyattributes:
                    newvalues = None
                    for i, value in enumerate(values):
                        if value:
                            if attribute in value:
                                if not newvalues:
                                    newvalues = []
                                newvalues.append(value.get(attribute))
                            else:
                                error = Error(
                                    type=Error.MISSING_DYNAMIC_VALUE,
                                    message="No key: '" + str(attribute) + "' in object: " +
                                            dynamickey + " for dynamic key: '" + str(key))
                                break
                    values = newvalues
            retValue = values
    return Property(key, retValue, error)
def __init__(self, seeds, done_que, run_que):
    self.showpercounts = 10
    self.timeout = 5
    self.starttime = time.time()
    self.oldtime = 0
    self.quit = 0
    self.https_enable = 0

    self.run_que = run_que
    self.done_que = done_que
    self.tasks = []
    self.done = 1
    self.errdone = set()
    self.err = Error()
    self.loadstate()

    self.blacklist = set(('.blog.', '.taobao.com', '.baidu.com', '.edu', '.gov', '.mil', 'mail',
                          '.google', 'weibo.com', 't.cn', 'wikipedia', 'facebook', 'twitter', 'dropbox'))
    self.allowdDomain = set(('com', 'net', 'org', 'cn', 'info', 'biz', 'me', 'name', 'cc', 'tv'))

    self.httpget = self.httpget_requests  # download method: httpget_requests | httpget_curl

    self.poolsize = 60
    self.poolmaxfree = 20
    self.freecount = 0
    self.down_pool = Pool(size=self.poolsize)

    self.totalnettime = 0
    self.cbcputime = 0
    self.totaldownsize = 0
    self.curspeed = 0
    self.debugnosave = 1
    self.tt = 1

    self.done_sites_fname = 'done_sites.bin'
    try:
        self.bfdone = BloomFilter.open(self.done_sites_fname)
    except:
        self.bfdone = BloomFilter(2**23, 10**(-5), self.done_sites_fname)  # 8M

    if self.run_que.qsize() == 0:
        for seed in seeds:
            self.run_que.put(seed.split("http://")[1])

    if self.https_enable == 0:
        self.urlpatern = re.compile(r'href=["\']http://([^/?#\"\']+)', re.I)
    else:
        self.urlpatern = re.compile(r'href=["\']http[s]?://([^/?#\"\'"]+)', re.I)
def __getToken(self):
    if (self.__expiration - time.time()) > 5 * 60:
        return self.__token

    discoveryUrl = requests.get(self.__url + "/identity/.well-known/openid-configuration",
                                headers={"Accept": "application/json"})
    if discoveryUrl.status_code < 200 or discoveryUrl.status_code >= 300:
        discoveryUrl.close()
        raise Error(
            "Failed to get access token endpoint from discovery URL: {status}:{reason}"
            .format(status=discoveryUrl.status_code, reason=discoveryUrl.text))
    tokenEndpoint = json.loads(discoveryUrl.content)["token_endpoint"]

    tokenInformation = requests.post(tokenEndpoint, data={
        "client_id": self.__clientId,
        "client_secret": self.__clientSecret,
        "grant_type": "client_credentials"
    })
    token = json.loads(tokenInformation.content)
    if token is None:
        raise Exception("Failed to retrieve Token")

    self.__expiration = float(token['expires_in']) + time.time()
    self.__token = token['access_token']
    return self.__token
def crearConstraintCheck(self, parent, listaids, columns, checkers):
    # Steps to create a CHECK constraint.
    # An OPCIONALES_ATRIBUTO_CHECK node is always received here.
    # 1. Walk the child nodes.
    # 2. An OPCIONAL_CONSTRAINT node holds the (optional) name of the check constraint.
    # 3. A LISTA_EXP node holds the ids that must be looked up.
    # 4. Verify that every id in the list appears in the column list.
    # 5. If they all do, insert the constraint.
    nombreCheck = ""
    for hijo in parent.hijos:
        if hijo.nombreNodo == "OPCIONAL_CONSTRAINT":
            nombreCheck = hijo.hijos[0].valor.upper()
        elif hijo.nombreNodo == "LISTA_EXP":
            for exp in hijo.hijos:
                nuevaConstraint = Check()
                nuevaConstraint.name = nombreCheck.upper()
                nuevaConstraint.checkExp = self.construirExpresionJSON(exp, listaids)
                checkers.append(nuevaConstraint)
    for ident in listaids:
        if not self.buscarColumna(columns, ident):
            respError = Error(3, "No se encuentra la columna " + ident + " en la tabla", -1)
            responseMessage = Response("20000", respError)
            return responseMessage
    responseMessage = Response("00000", "Se creo la constraint correctamente")
    return responseMessage
def translateVariable(i):
    global tsStack
    global result
    # get the value of variable $tn and set it into result
    auxStack = []
    found = False
    while len(tsStack) > 0:
        ts = tsStack.pop()
        auxStack.append(ts)
        if ts.isSymbolInTable(i.id):  # the variable is in the symbol table
            var3dName = ts.get(i.id).varName  # varName is the 3D name for the variable
            result.temp = var3dName
            # print('variable encontrada', var3dName)
            found = True
            break
    if len(auxStack) > 0:
        while True:
            tsStack.append(auxStack.pop())
            if len(auxStack) == 0:
                break
    if not found:
        print("Traduciendo variable. No existe la variable: ", i.id)
        e = Error('No exite la variable utilizada' + str(i.id) + '.', 0, 0)
        global semanticErrors
        semanticErrors.add(e)
def __init__(self):
    self.showpercounts = 10
    self.timeout = 20
    self.poolsize = 100
    self.down_pool = Pool(size=self.poolsize)
    self.run_que = RedisQueueConnection('running').conn
    self.doneque = RedisQueueConnection('robots').conn
    self.tempque = Queue()
    self.done = 1
    self.sent = 0
    self.quit = False
    self.err = Error()
    self.https_enable = 0
    self.httpget = self.httpget_requests  # download method: httpget_requests | httpget_curl
    self.totalnettime = 0
    self.totaldownsize = 0
    self.starttime = time()
    self.ip = getip()
    self.headers = {
        'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.6',
        'Accept-Encoding': 'gzip,deflate',
        'Connection': 'close',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
    }
def LoadInputImages(self):
    self.gui.UpdateLabelWidget(self.runStatusLabel, "Status: Loading known faces...")
    imageFiles = self.fileHandler.ListDirectory(self.directorySettings.knownPeopleDirectory)
    if len(imageFiles) <= 0:
        raise Error("No input images found, no new individuals will be able to be identified other than already-known encodings.")
    for imageFile in imageFiles:
        fileInformation = imageFile.split(".")
        if fileInformation[1].lower() in self.supportedFileFormats:
            if any(fileInformation[0] in identifier for identifier in list(self.knownPeople.keys())):
                print(f"Skipped loading file {fileInformation[0]} because an encoding is already known of this individual")
                continue
            print(f"New image found, analysing image {fileInformation[0]} and saving to face encoding database")
            loadedImage = face_recognition.load_image_file(self.directorySettings.knownPeopleDirectory + "/" + imageFile)
            faceLocations = face_recognition.face_locations(loadedImage, model="cnn", number_of_times_to_upsample=0)
            faceEncodings = face_recognition.face_encodings(loadedImage, known_face_locations=faceLocations)
            number = 1
            for face in faceEncodings:
                name = f"{fileInformation[0]}-{number}"
                self.knownPeople[name] = face
                number += 1
            self.SaveKnownPeople(self.knownPeople)
        else:
            continue
    self.gui.EnableButton(self.realTimeButton)
def payment():
    errorRate = random.randint(0, 99)
    if errorRate < ERROR_RATE_THRESHOLD:
        logs('Payment', 'Checkout operation failed - Service Unavailable: 503')
        raise Error('Checkout Failed - Service Unavailable', status_code=503)
    else:
        with tracer.start_as_current_span("checkout"):
            rawData = request.form
            data = {}
            for itemId in rawData.keys():
                data[itemId] = sum([-val for val in rawData.getlist(itemId, type=int)])
            soldInventorySession = requests.Session()
            soldInventorySession.mount("http://", HTTPAdapter(max_retries=retry_strategy))
            soldInventoryUpdateResponse = soldInventorySession.post(
                "http://{}:8082/update_inventory".format(INVENTORY), data=data)
            soldInventorySession.close()
            if soldInventoryUpdateResponse.status_code == 200:
                logs('Payment', 'Customer successfully checked out cart')
                return "success"
            else:
                failedItems = soldInventoryUpdateResponse.json().get("failed_items")
                return make_response(
                    "Failed to checkout following items: {}".format(','.join(failedItems)),
                    soldInventoryUpdateResponse.status_code)
class Generator:
    error = Error()

    def __init__(self):
        print("init generators")

    def choise(self):
        print("Generator choise")
        generators = {1: "Hydrate generator", 2: "Battery", 3: "Prosumer"}
        while True:
            print("Choise what do you want")
            for i in generators:
                print(i, "-", generators[i])
            print("0 - exit")
            # input() returns a string in Python 3, so convert before comparing
            choise = int(input("Enter your choise: "))
            os.system('clear')
            if choise == 0:
                return
            if choise == 1:
                hydrate.choise()
            if choise == 2:
                battery.choise()
            if choise == 3:
                prosumer.choise()
            if choise > 3 or choise < 0:
                self.error.log("generator")
def __init__(self, done_que):
    self.showpercounts = 100
    self.timeout = 5
    self.starttime = time()
    self.quit = False
    self.run_que = RedisQueueConnection('running').conn
    self.done_que = done_que
    self.tasks = []
    self.done = 1
    self.errdone = set()
    self.err = Error()
    self.https_enable = 0
    self.httpget = self.httpget_requests  # download method: httpget_requests | httpget_curl
    self.poolsize = 100
    self.down_pool = Pool(size=self.poolsize)
    self.totalnettime = 0
    self.totaldownsize = 0
    self.ip = getip()
def update_inventory():
    errorRate = random.randint(0, 99)
    if errorRate < UPDATE_ERROR_RATE_THRESHOLD:
        logs('Inventory', 'Update operation failed - Service Unavailable: 503')
        raise Error('Update Inventory Failed - Service Unavailable', status_code=503)
    else:
        with tracer.start_as_current_span("update_inventory"):
            rawData = request.form
            failedItems = []
            for itemId in rawData.keys():
                qty = sum([val for val in rawData.getlist(itemId, type=int)])
                databaseResponse = post(
                    "http://{}:8083/update_item".format(DATABASE),
                    data={"ItemId": itemId, "Qty": qty})
                if databaseResponse.status_code != 200:
                    failedItems.append(itemId)
            if len(failedItems) > 0:
                response = jsonify({"failed_items": failedItems})
                response.status_code = 206
                response.status = "{} {}".format(
                    response.status_code,
                    "Update inventory response contains failed items.")
                return response
            else:
                logs('Inventory', 'Update operations successful')
                return jsonify({"failed_items": []})
def execute(self, event=None):
    Action.execute(self)
    self.generate_dict()
    self.generate_schedule()
    # checking that the user enters a name
    if len(self.filename) == 0:
        _ = Error("File must have a name")
        return
    # open the file and print the schedules into it
    fullfilename = os.getcwd() + '/' + self.filename + '.txt'
    self.filename = fullfilename
    f = open(os.path.expanduser(self.filename), "w+")
    for i, Sched in enumerate(self.ScheduleList, 1):
        f.write('-' * 25 + ' Schedule ( ' + str(i) + ' ) ' + '-' * 25)
        f.write("\n")
        for key, val in Sched.days.items():
            line = key + " : " + str(val)
            f.write(line)
            f.write("\n")
    self.filename = ""
    f.close()
    self.wind.destroy()
def getParams(self):
    self.first = self.first.split(" ")
    self.second = self.second.split(" ")
    try:
        if len(self.first) > 3 or len(self.second) > 3:
            raise IndexError
        if len(self.first) < 3 or len(self.second) < 3:
            raise Error("Podano za mało elementów")  # "Too few elements given"
        a = [float(self.first[0]), float(self.second[0])]
        b = [float(self.first[1]), float(self.second[1])]
        c = [float(self.first[2]), float(self.second[2])]
        logging.info("Parametry poczatkowe: " + str([a, b, c]))  # initial parameters
    except ValueError:
        raise Error("Jeden z argumentów nie jest liczbą!")  # "One of the arguments is not a number!"
    except IndexError:
        raise Error("Podano za dużo elementów")  # "Too many elements given"
    return [a, b, c]
def getDataSource(self):
    if self.dataSource:
        print(self.dataSource)
        return self.dataSource
    else:
        raise Error(
            "[CSVReader] '%s' doesn't contain valid params. Please check it again."
            % (self.filename))
def t_error(t):
    global lexicalErrors
    # print("Illegal characters!")
    t.lexer.skip(1)
    error = Error('Caracter no permitido: ' + str(t.value[0]), t.lineno, 0)
    print('Error léxico. Caracter no permitido: ' + str(t.value[0]), t.lineno)
    lexicalErrors.add(error)
def get_live_searches(self):
    ''' Returns a list of LiveSearch objects '''
    live_searches_lists = []
    try:
        live_searches = self._get_allowed_data('livesearches')
        if live_searches.get("success"):
            for live_search in live_searches["livesearches"]:
                live_searches_lists.append(
                    LiveSearch(live_search["life_id"], live_search["searchname"], live_search["query"]))  # ,"livesearch"))
            return live_searches_lists
        else:
            return Error(live_searches["message"])
    except Exception as e:
        return Error(str(e))
def __init__(self, seeds):
    self.showpercounts = 10
    self.timeout = 5
    self.starttime = time.time()
    self.oldtime = 0
    self.quit = 0
    self.https_enable = 0

    self.run_queue = Queue()
    self.tasks = []
    self.done = 0
    self.errdone = set()
    self.err = Error()
    self.loadstate()

    # self.whitelist = ['html', 'htm', 'php', 'shtml', 'asp', 'jsp', 'do', 'action', 'aspx']
    self.blacklist = set(('.blog.', '.taobao.com', '.baidu.com', '.edu', '.gov', '.mil', 'mail',
                          '.google', 'weibo.com', 't.cn', 'worldpress.com', 'blogspot.com',
                          'youtube', 'wikipedia', 'facebook', 'twitter', 'dropbox'))
    self.allowdDomain = set(('com', 'net', 'org', 'cn', 'info', 'biz', 'me', 'name', 'cc', 'tv'))

    self.httpget = self.httpget_requests  # download method: httpget_requests | httpget_curl

    self.poolsize = 100
    self.poolmaxfree = 40
    self.freecount = 0
    self.down_pool = Pool(size=self.poolsize)
    self.mutex = gevent.coros.RLock()

    self.totalnettime = 0
    self.cbcputime = 0
    self.totaldownsize = 0
    self.curspeed = 0
    self.debugnosave = 1

    try:
        self.bfdone = BloomFilter.open('done_sites.bin')
    except:
        self.bfdone = BloomFilter(2**23, 10**(-5), 'done_sites.bin')

    if self.run_queue.qsize() == 0:
        for seed in seeds:
            self.run_queue.put(seed.split("http://")[1])

    if self.https_enable == 0:
        self.urlpatern = re.compile('href=[\"\']http://([^/?#\"\']+)')
    else:
        self.urlpatern = re.compile('href=[\"\']http[s]?://([^/?#\"\'"]+)')
def getColumn(self, columnName):
    if columnName in self.fieldNames:
        column = [item[columnName] for item in self.dataSource]
        column = CSVReader._formatArray(column)
        print(column)
        return column
    else:
        raise Error("[CSVReader] '%s' does not exist in '%s' file" % (columnName, self.filename))
def buildTransitions(self):
    for which in range(0, len(self.transitions)):
        transition = self.transitions[which].split(':')
        stateNum = int(transition[0])
        if self.validateTransition(transition):
            self.states[stateNum].addTransition(transition[2], self.states[int(transition[1])])
        else:
            self.errors.append(Error("Transitions are not valid."))