def i_get_the_first_search_record_containing_keyword(self, keyword):
    assert isinstance(self.search_results_page, SearchResultsPage), \
        'The search results page is displayed'
    bag_of_keywords = str.split(self.search_results_page.text_of_first_record.lower())
    if (bag_of_keywords[0] in keyword.lower()) or (bag_of_keywords[-1] in keyword.lower()):
        logger.info(
            'Succeeded in finding the keyword within bing.com search results',
            # html=True
        )
    else:
        logger.error(
            '"{}" not in "{}"'.format(keyword, self.search_results_page.text_of_first_record),
            html=True
        )
def reset_android(self, pro_alias=None, remote_url=None):
    """Reset the android device."""
    # Reboot the android device
    ret_reb = os.popen("adb shell reboot").read()
    is_null = (len(ret_reb) == 0)
    if is_null:
        logger.info(self._getcurtm() + ": Succeeded rebooting android device.",
                    also_console=True)
    else:
        logger.error(ret_reb)
        return -1
    # Wait for the android device to reconnect
    ret_val = subprocess.Popen("adb wait-for-device", shell=True)
    ret_val.wait()
    if ret_val.stdout is None:
        ret_dev = os.popen('adb devices').read()
        logger.info(self._getcurtm() + ": Android device named: " +
                    ret_dev.split('\n')[1].split('\t')[0] + " connected.",
                    also_console=True)
        time.sleep(15)
        logger.info(self._getcurtm() + ": Slept 15 seconds to wait for the device to settle.",
                    also_console=True)
        return None
    else:
        # logger.error(pro_alias + " " + test_str(adbPid[4]) + " process fail to kill!")
        logger.error(self._getcurtm() + ": Android device connection timed out.")
        return -1
def image_self_check(self):
    if os.path.isfile(self.get_convert_path):
        logger.info("Convert file exists, path used: " + self.get_convert_path)
    else:
        message = "Missing file convert.exe"
        raise AssertionError(message)
    if os.path.isfile(self.get_compare_path):
        logger.info("Compare file exists, path used: " + self.get_compare_path)
    else:
        message = "Missing file compare.exe"
        raise AssertionError(message)
    if os.path.isfile(self.get_identify_path):
        logger.info("Identify file exists, path used: " + self.get_identify_path)
    else:
        message = "Missing file identify.exe"
        raise AssertionError(message)
    argument_list = [self.get_convert_path, "--version"]
    try:
        process = subprocess.Popen(argument_list, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)  # , shell=True
        process.wait()
        output = process.communicate()
        logger.info("returnCode \t:" + str(process.returncode))
        if process.returncode != 0:
            message = "processing failed msg= %s" % str(output)
            raise AssertionError(message)
        logger.info("stdout \t:" + str(output[0]))
        logger.info("stderr \t:" + str(output[1]))
    except OSError as e:
        logger.error(e)
def ConfigureCollector(self):
    self.LoginTo(self.collector_ip)
    logger.info("Now installing rpms and configuring Collector on %s" % self.collector_ip,
                also_console=True)
    ssh.write("id")
    line = ssh.read_until_prompt()
    logger.console(line)
    ssh.write("yum install -y reflex-collector")
    output = ssh.read_until_prompt(loglevel="INFO")
    for line in output.splitlines():
        line = line.strip(" ")
        if line.find("already installed and latest version") != -1:
            logger.console("reflex-collector already installed", newline="yes", stream="stdout")
        elif line.find("failed") != -1:
            logger.console("reflex-collector failed during installation", newline="yes", stream="stdout")
            logger.error("failure", html=True)
        elif line.find("Error: Package:") != -1:
            m1 = re.search(r'Error: Package:\s.*\s\(', line)
            if m1:
                package = m1.group()
                logger.console(line, newline="yes", stream="stdout")
                logger.error(package, html=True)
    logger.info("Reflex-collector installed successfully", also_console=True)
    # check reflex rpm also; not sure why
    self.SwitchToReflex()
    self.ReflexKeysGeneration()
    self.cliPrompt()
    ssh.write("show pm process collector")
    output = ssh.read_until_prompt(loglevel="INFO")
    logger.console(output)
def _quit(self, driver, error):
    try:
        driver.quit()
    except Exception as exception:
        logger.error('When closing browser, received exception: %s' % exception)
        error = exception
    return error
def clico_boton_iniciar_sesion(self):
    """Clicks the 'Iniciar Sesion' button located in the header of the main page."""
    try:
        boton_sesion = self.driver.find_element_by_xpath("//span[@class='icono iniciarSesion']")
        boton_sesion.click()
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format("Icono iniciar sesion"))
        raise Exception(e.msg)
def clico_sobre_boton_descargar(self):
    """Clicks directly on the download button."""
    try:
        area_detalle = self.driver.find_element_by_xpath("//div[@class='listadoElementos programas grande']")
        area_detalle.click()
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format("Descargar"))
        raise Exception(e.msg)
def aparece_mensaje_texto_que_dice(self, texto):
    """Looks for a match with the message that appears right after the header."""
    try:
        mensaje = self.driver.find_element_by_xpath("//h1")
        assert texto in mensaje.text, \
            "'%s' does not match the text found '%s'" % (texto, mensaje.text)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format('Texto: ' + texto))
        raise Exception(e.msg)
def clico_sobre_link_con_texto(self, nombre_link):
    """Clicks on the link with the text given by the user."""
    try:
        enlace = self.driver.find_element_by_link_text(nombre_link)
        enlace.click()
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format('Link: ' + nombre_link))
        raise Exception(e.msg)
def activo_el_check_con_nombre(self, nombre):
    """Activates a checkbox given its name."""
    try:
        checkbox = self.driver.find_element_by_name(nombre)
        checkbox.click()
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format('Name: ' + nombre))
        raise Exception(e.msg)
def check_if_key_not_exits(self, db_name, proto_id):
    global mom
    global proto
    db = db_name_convert(db_name)
    if mom[db_name].exists(db, proto[proto_id]) == 0:
        logger.error("Key " + proto_id + " exists in Redis.")
        raise AssertionError
def connect_to_database(self, dbServer, dbUser, dbPass, dbDatabase, dbPort=1433,
                        dbQTimeout=0, dbLoginTimeout=60, charSet='UTF-8'):
    '''
    Connect to database. Must be called before calling any query actions.
    '''
    try:
        self._dbconnection = pymssql.connect(server=dbServer, user=dbUser, password=dbPass,
                                             database=dbDatabase, port=int(dbPort),
                                             timeout=int(dbQTimeout),
                                             login_timeout=int(dbLoginTimeout),
                                             charset=charSet, as_dict=False)
        return self._dbconnection
    except Exception as e:
        logger.error(e)
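# A minimal usage sketch (assumed caller code, not part of the library above):
# after Connect To Database succeeds it returns a pymssql connection, which can
# be used through a standard DB-API cursor. The host, credentials and table
# name below are illustrative placeholders.
def example_query_row_count(db_keywords):
    conn = db_keywords.connect_to_database('dbhost', 'user', 'secret', 'testdb')
    if conn is None:
        # connect_to_database logs the exception and implicitly returns None on failure
        raise AssertionError('Database connection failed')
    cursor = conn.cursor()
    cursor.execute('SELECT COUNT(*) FROM some_table')  # hypothetical table name
    (count,) = cursor.fetchone()
    return count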
def clico_en_salir(self):
    """Logs the user out via URL."""
    try:
        url = 'http://www.portalprogramas.com/comunidad/cerrarSesion'
        self.driver.get(url)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format(url))
        raise Exception(e.msg)
def clico_sobre_icono_lupa(self):
    """Clicks on the button with the magnifying-glass icon."""
    try:
        form = self.driver.find_element_by_xpath("//form")
        barra_busqueda = form.find_element_by_xpath("//input[@class='botonBuscar busb']")
        barra_busqueda.click()
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format("Lupa/Buscar"))
        raise Exception(e.msg)
def compruebo_que_aparece_mensaje_despues_de_busqueda(self, mensaje):
    """Checks that the message that appears right after the search bar matches the one we are looking for."""
    try:
        # cabecera = self.driver.find_element_by_xpath("//div[contains(@class, 'grid_12 tituloSEO inner mini vertical')]/descendant::h1")
        cabecera = self.driver.find_element_by_xpath("//div[@class='inner mini mobileMarginTop']/descendant::h1")
        assert mensaje.lower() in cabecera.text.lower(), ELEMENT_NOT_FOUND.format(mensaje)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format(mensaje))
        raise Exception(e.msg)
def compruebo_texto_registro_incorrecto(self):
    """Locates the text box that appears after a registration attempt."""
    try:
        mensaje = 'Text box not found'
        cabecera = self.driver.find_element_by_xpath("//div[contains(@class, 'alert')]")
        assert cabecera.is_displayed(), ELEMENT_NOT_FOUND.format(mensaje)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format(mensaje))
        raise Exception(e.msg)
def sismember_to_redis(self, db_name, proto_id):
    global mom
    global proto
    db = db_name_convert(db_name)
    result = mom[db_name].sismember(db, proto[proto_id], 1)
    if result is None:
        logger.error(result)
    return result
def clico_boton_con_nombre(self, nombre_boton):
    """Given the name of a button, locates it and clicks on it."""
    try:
        boton = self.driver.find_element_by_xpath("//input[@value='" + nombre_boton + "']")
        boton.submit()
        time.sleep(1)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(ELEMENT_NOT_FOUND.format("Boton: " + nombre_boton))
        raise Exception(e.msg)
def relleno_campo_busqueda_con_el_texto(self, texto):
    """Fills the search field with the given text."""
    try:
        form = self.driver.find_element_by_xpath('//form')
        campo_busqueda = form.find_element_by_xpath("//input[@id='busqueda']")
        campo_busqueda.clear()
        campo_busqueda.send_keys(str(texto))
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(FIELD_NOT_FOUND.format('Busqueda'))
        raise Exception(e.msg)
def hago_click_sobre_icono_seleccionar_plataforma(self):
    """Clicks directly on the operating-systems icon in the search bar."""
    try:
        form = self.driver.find_element_by_xpath("//form")
        barra_busqueda = form.find_element_by_xpath(
            ".//input[@id='opcionMenuSelected_mac'] and //span[@class='icono verMas']"
        )
        barra_busqueda.click()
    except TimeoutException as e:
        logger.error(PLATFORM_NOT_FOUND.format(str("Busqueda")))
        raise Exception(e.msg)
def HAdoopShellCmds(cmd):
    # logger.warn(robo.get_library_instance('SSHLibrary'))
    # ssh = robo.get_library_instance('SSHLibrary')
    Login_To(NN_IP, un="admin", pw="admin@123")
    journalnodes = []
    host_mapping = IpHostName()
    var = robo.get_variable_value("${shellPrompt}")
    ssh.set_client_configuration(prompt=var)
    # logger.info(cmd, also_console=True)
    pmxcmd = "pmx subshell hadoop_yarn %s" % cmd
    logger.info(pmxcmd, also_console=True)
    ssh.write(pmxcmd)
    output = ssh.read_until_prompt()
    if pmxcmd.find("stop") != -1:
        for line in output.splitlines():
            if line.find("Stopping JournalNode on Host:") != -1:
                match = re.search(r'.*:\s(.*)$', line)
                if match:
                    nodes = str(match.group(1))
                    journalnodes.append(nodes)
                else:
                    logger.error("Journal nodes cannot be stopped from subshell commands")
        # logger.console(host_mapping)
        # logger.console(host_mapping["EIGHTY-DN82"])
        # logger.info(host_mapping[journalnodes[0]], also_console=True)
        logger.console(journalnodes, stream='stdout')
        for i in journalnodes:
            output = executeRemote(
                "ps -aef | grep -i journalnode | grep -v grep | "
                "awk '{print $2 \":\" $9 \":\" $12 \":\" $22}'",
                host_mapping[i])
            # logger.info(output, also_console=True)
            for line in output.splitlines():
                if line.find("Dproc_journalnode") != -1:
                    logger.warn("Journal Nodes are still UP", html=True)
                    return False
    elif pmxcmd.find("start") != -1:
        for line in output.splitlines():
            if line.find("Starting JournalNode on Host:") != -1:
                match = re.search(r'.*:\s(.*)$', line)
                if match:
                    nodes = str(match.group(1))
                    journalnodes.append(nodes)
                else:
                    logger.error("Journal nodes cannot be started from subshell commands")
        logger.console(journalnodes, stream='stdout')
        for i in journalnodes:
            # logger.info(i, also_console=True)
            # logger.info(host_mapping[i], also_console=True)
            output = executeRemote(
                "ps -aef | grep -i journalnode | grep -v grep | "
                "awk '{print $2 \":\" $9 \":\" $12 \":\" $22}'",
                host_mapping[i])
            # logger.info(output, also_console=True)
            for line in output.splitlines():
                if line.find("Dproc_journalnode") != -1:
                    logger.warn("Journal Nodes are Running", html=True)
                    return True
def aparece_mensaje_error_mail(self, texto_error):
    """Locates the error message returned by the email field."""
    try:
        label_error = self.driver.find_element_by_xpath("//p[@class='colorRojo']")
        assert texto_error in label_error.text, \
            "Texts do not match %s != %s" % (texto_error, label_error.text)
    except (TimeoutException, NoSuchElementException) as e:
        fichero = 'email_incorrecto' + str(randint(0, 9999)) + '.png'
        self.driver.get_screenshot_as_file(fichero)
        # if there is an error, log out the logged-in user so it does not affect the rest of the tests
        self.driver.get('http://www.portalprogramas.com/comunidad/cerrarSesion')
        logger.error(ELEMENT_NOT_FOUND.format('class: colorRojo'))
        raise Exception('Check the screenshot with the error: ' + fichero)
def setup_dut(self, node):
    ssh = SSH()
    ssh.connect(node)
    ssh.scp('resources/libraries/bash/dut_setup.sh', '/tmp/dut_setup.sh')
    (ret_code, stdout, stderr) = \
        ssh.exec_command('sudo -Sn bash /tmp/dut_setup.sh')
    logger.trace(stdout)
    if 0 != int(ret_code):
        logger.error('DUT {0} setup script failed: "{1}"'.format(
            node['host'], stdout + stderr))
        raise Exception('DUT test setup script failed at node {}'.format(
            node['host']))
def __extract_tarball_at_node(self, tarball, node):
    logger.console('Extracting tarball to {0} on {1}'.format(
        con.REMOTE_FW_DIR, node['host']))
    ssh = SSH()
    ssh.connect(node)
    cmd = 'rm -rf {1}; mkdir {1} ; sudo -Sn tar -zxf {0} -C {1};'.format(
        tarball, con.REMOTE_FW_DIR)
    (ret_code, stdout, stderr) = ssh.exec_command(cmd, timeout=30)
    if 0 != ret_code:
        logger.error('Unpack error: {0}'.format(stderr))
        raise Exception('Failed to unpack {0} at node {1}'.format(
            tarball, node['host']))
def get_from_redis(self, db_name, proto_id, dict_fields):
    global mom
    global proto
    db = db_name_convert(db_name)
    result = mom[db_name].get(db, proto[proto_id], 1)
    GETPROTO[proto_id] = result
    if result is None:
        logger.error(result)
        return {}
    else:
        for field in dict_fields:
            dict_fields[field] = analyse_result(result, field, dict_fields)
        return dict_fields
def setup_xml(self, test_name="", user_pin="123456", so_pin="12345678",
              loop_times="", data_len=""):
    """
    Set up the xml file data.xml; config params include test_name, user_pin,
    so_pin, loop_times and data_len.
    example:
    | setup xml | test_name | user_pin | so_pin   | loop_times | data_len |
    | setup xml | 101       | 123456   | 12345678 | 50         | 32       |
    """
    if os.path.exists(PATH(r"../res/data.xml")):
        os.remove(PATH(r"../res/data.xml"))
    doc = minidom.Document()
    doc.standalone = True
    root_node = doc.createElement("root")
    doc.appendChild(root_node)
    test_node = doc.createElement("Test")
    # book_node.setAttribute("isbn", "34909023")
    root_node.appendChild(test_node)
    test_name_node = doc.createElement("Testname")
    user_pin_node = doc.createElement("Userpin")
    so_pin_node = doc.createElement('Sopin')
    loop_times_node = doc.createElement('Looptimes')
    data_len_node = doc.createElement('Datalen')
    test_node.appendChild(test_name_node)
    test_node.appendChild(user_pin_node)
    test_node.appendChild(so_pin_node)
    test_node.appendChild(loop_times_node)
    test_node.appendChild(data_len_node)
    test_name_text_node = doc.createTextNode(test_name)
    test_name_node.appendChild(test_name_text_node)
    user_pin_text_node = doc.createTextNode(user_pin)
    user_pin_node.appendChild(user_pin_text_node)
    so_pin_text_node = doc.createTextNode(so_pin)
    so_pin_node.appendChild(so_pin_text_node)
    loop_times_text_node = doc.createTextNode(loop_times)
    loop_times_node.appendChild(loop_times_text_node)
    data_len_text_node = doc.createTextNode(data_len)
    data_len_node.appendChild(data_len_text_node)
    # doc.writexml(f, "/t", "/t", "/n", "utf-8")
    f = open(PATH(r"../res/data.xml"), "w")
    test_node.toprettyxml(encoding="utf-8")
    doc.writexml(f, encoding="utf-8")
    f.close()
    if os.path.exists(PATH(r"../res/data.xml")):
        logger.info("Set up xml file successfully.")
    else:
        logger.error("Failed to set up xml file.")
        return -1
def ConfigureGMS(self):
    logger.console(self.configs["gms_ip"])
    self.LoginTo(self.configs["gms_ip"])
    # self.cleanRPMmachine()
    logger.info("Now installing rpms and configuring gms-server on %s" % self.gms_ip,
                also_console=True)
    ssh.write("id")
    line = ssh.read_until_prompt()
    # logger.console(line)
    '''
    if line.find("402") != -1:
        logger.console("With reflex user")
        ssh.set_client_configuration(prompt="#")
        ssh.write("exit")
        logger.info("Exiting reflex-user", also_console=True)
        line = ssh.read_until_prompt()
        # ssh.write("yum install -y reflex-tm")
        # ssh.write("yum install -y reflex-gms")
        line = ssh.read_until_prompt()
        logger.info(line, also_console=True)
    else:
    '''
    self.CreateReflexUser(self.gms_ip)
    ssh.write("yum install -y reflex-gms")
    output = ssh.read_until_prompt(loglevel="INFO")
    for line in output.splitlines():
        line = line.strip(" ")
        if line.find("already installed and latest version") != -1:
            logger.console("reflex-gms already installed", newline="yes", stream="stdout")
        elif line.find("failed") != -1:
            logger.console("reflex-gms failed during installation", newline="yes", stream="stdout")
            logger.error("failure", html=True)
        elif line.find("Error: Package:") != -1:
            m1 = re.search(r'Error: Package:\s.*\s\(', line)
            if m1:
                package = m1.group()
                logger.console(line, newline="yes", stream="stdout")
                logger.error(package, html=True)
    logger.info("Reflex-GMS installed successfully", also_console=True)
    # logger.info("Switching to reflex user on %s" % self.gms_ip, also_console=True)
    # check reflex rpm also; not sure why
    ssh.write("service httpd restart")
    output = ssh.read_until_prompt()
    logger.info(output, html=True, also_console=False)
    self.SwitchToReflex()
    self.ReflexKeysGeneration()
    self.cliPrompt()
    ssh.write("pgsql mode external")
    ssh.write("pm process pgsqld restart")
    ssh.write("pm process gms_server restart")
def connect_to_redis(self, db_name):
    global mom
    global glb
    try:
        if glb is None:
            glb = RgGlobal('RF_demo')
        db = db_name_convert(db_name)
        if glb.connect_sync(db) != 0:
            sys.exit()
        mom[db_name] = RgMomSync(glb)
    except Exception as ex:
        logger.error(str(ex))
        raise Exception(str(ex))
def get_net_settings(self, port):
    if port.upper() == "LAN":
        s = self.pwc.get_lan_status()
    elif port.upper() == "WAN":
        s = self.pwc.get_wan_status()
    else:
        logger.error("port: %s not defined." % (port))
        raise ValueError
    settings = super(APWL_PortalWebBase, self)._DS_Net_Settings(
        connection_status=s['connection_status'],
        ip_address=s['ip_address'],
        subnet_mask=s['subnet_mask'],
        mac_address=s['mac_address'],
        dns_server=s['dns_server'])
    return settings
def _get_info_for_image(self, file_name):
    argument_list = [self.get_identify_path, "-quiet", "-format",
                     "%[fx:w]\\n%[fx:h]", file_name]
    try:
        process = subprocess.Popen(argument_list, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)  # , shell=True
        process.wait()
        output = process.communicate()
        if process.returncode != 0:
            message = "processing failed msg= %s" % str(output)
            raise AssertionError(message)
        # logger.info("file Name \t:" + file_name + str(output))
        table_results = output[0].split()
        return table_results[0], table_results[1]
    except OSError as e:
        logger.error(e)
def scp_and_execute_script(self, vat_name, node, timeout=15, json_out=True):
    """Copy vat_name script to node, execute it and return result.

    :param vat_name: Name of the vat script file. Full path and name of
        the script is required.
    :param node: Node to execute the VAT script on.
    :param timeout: Seconds to allow the script to run.
    :param json_out: Require JSON output.
    :type vat_name: str
    :type node: dict
    :type timeout: int
    :type json_out: bool
    :returns: Status code, stdout and stderr of executed VAT script.
    :rtype: tuple
    :raises RuntimeError: If VAT script execution failed.
    """
    ssh = SSH()
    try:
        ssh.connect(node)
    except:
        raise SSHException("Cannot open SSH connection to execute VAT "
                           "command(s) from template {0}".format(vat_name))
    ssh.scp(vat_name, vat_name)
    cmd = "sudo -S {vat} {json} in {input} script".format(
        json="json" if json_out is True else "",
        vat=Constants.VAT_BIN_NAME,
        input=vat_name)
    try:
        (ret_code, stdout, stderr) = ssh.exec_command(cmd, timeout)
    except SSHTimeout:
        logger.error("VAT script execution timeout: {0}".format(cmd))
        raise
    except:
        raise RuntimeError("VAT script execution failed: {0}".format(cmd))
    self._ret_code = ret_code
    self._stdout = stdout
    self._stderr = stderr
    self._delete_files(node, vat_name)
def exec_cmd(node, cmd, timeout=600, sudo=False, disconnect=False):
    """Convenience function to ssh/exec/return rc, out & err.

    Returns (rc, stdout, stderr).

    :param node: The node to execute command on.
    :param cmd: Command to execute.
    :param timeout: Timeout value in seconds. Default: 600.
    :param sudo: Sudo privilege execution flag. Default: False.
    :param disconnect: Close the opened SSH connection if True.
    :type node: dict
    :type cmd: str or OptionString
    :type timeout: int
    :type sudo: bool
    :type disconnect: bool
    :returns: RC, Stdout, Stderr.
    :rtype: tuple(int, str, str)
    """
    if node is None:
        raise TypeError(u"Node parameter is None")
    if cmd is None:
        raise TypeError(u"Command parameter is None")
    if not cmd:
        raise ValueError(u"Empty command parameter")

    ssh = SSH()
    try:
        ssh.connect(node)
    except SSHException as err:
        logger.error(f"Failed to connect to node {node[u'host']}\n{err!r}")
        return None, None, None
    try:
        if not sudo:
            ret_code, stdout, stderr = ssh.exec_command(cmd, timeout=timeout)
        else:
            ret_code, stdout, stderr = ssh.exec_command_sudo(cmd, timeout=timeout)
    except SSHException as err:
        logger.error(repr(err))
        return None, None, None
    finally:
        if disconnect:
            ssh.disconnect()
    return ret_code, stdout, stderr
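# A minimal usage sketch (assumed caller code): exec_cmd returns (None, None, None)
# on connection or execution failure, so callers should check for None before
# checking the return code. The 'node' dict format follows SSH.connect; the
# command below is illustrative only.
def example_check_uptime(node):
    ret_code, stdout, stderr = exec_cmd(node, 'uptime', timeout=30, sudo=False)
    if ret_code is None:
        raise RuntimeError('SSH connection or command execution failed')
    if ret_code != 0:
        raise RuntimeError('uptime failed: {0}'.format(stderr))
    return stdout.strip()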
def create_new_vulnerability(self, vul_dict):
    url = "{}/api/vulnerability/create".format(self.threatplaybook)
    if 'scan' not in vul_dict or 'name' not in vul_dict:
        raise Exception("Mandatory fields 'scan' and 'name' not in Result")
    vul_push = {
        "scan": vul_dict.get('scan'),
        "name": vul_dict.get('name'),
        "cwe": vul_dict.get('cwe', 0),
        "severity": int(vul_dict.get('severity', 0)),
    }
    if 'description' in vul_dict:
        vul_push['description'] = vul_dict.get('description')
    if 'observation' in vul_dict:
        vul_push['observation'] = vul_dict.get('observation')
    if 'remediation' in vul_dict:
        vul_push['remediation'] = vul_dict.get('remediation')
    evid = []
    if 'evidences' in vul_dict:
        if not isinstance(vul_dict.get('evidences'), list):
            raise Exception("Evidences have to be a list definition")
        else:
            for single_evid in vul_dict.get('evidences'):
                svid = {
                    "url": single_evid.get('url', ''),
                }
                if 'param' in single_evid:
                    svid['param'] = single_evid.get('param')
                if 'log' in single_evid:
                    svid["log"] = b64encode(single_evid.get('log').encode()).decode()
                evid.append(svid)
    if evid:
        vul_push['evidences'] = evid
    resp = requests.post(url, json=vul_push, headers={"Authorization": self.token})
    if resp.status_code != 200:
        logger.error(resp.content)
    else:
        logger.info("Successfully pushed Vulnerability Data")
def check_service(self, pro_alias):
    adb_cmd = "adb shell \"ps | grep " + str(pro_alias) + "\""
    print "adb_cmd: ", adb_cmd
    pro_details = os.popen(adb_cmd).read()
    print "pro_details: ", pro_details
    # print "proDetails: " + proDetails
    # logger.info(proDetails, also_console=True)
    is_null = (len(pro_details) == 0)
    if is_null:
        logger.error(pro_alias + " is not alive.")
        return -1
    elif pro_details.endswith("./ServerCenter"):
        logger.info(pro_alias + " is alive.")
    else:
        return pro_details
def get_individual_response_field(self, field_to_get):
    """
    Returns the specified data field from the first record on the response,
    if available. Otherwise, an error is thrown.
    """
    if not self.response:
        logger.error("No data matching query conditions was returned.")
        raise AssertionError(
            "Failed to retrieve data required for this test.")
    try:
        data = self.response[field_to_get]
    except KeyError:
        raise AssertionError(
            "Field not found in response from {table}: {field}".format(
                table=self.query_table, field=field_to_get))
    return data
def load_json_from_file(self, file_name):
    """Load JSON from file. Return json as a dictionary object.

    Arguments:
        - file_name: absolute json file name

    Return json object (list or dictionary)

    Examples:
    | ${result}= | Load Json From File | /path/to/file.json |
    """
    logger.debug("Check if file exists")
    if os.path.isfile(file_name) is False:
        logger.error("JSON file: " + file_name + " not found")
        raise IOError
    with open(file_name) as json_file:
        data = json.load(json_file)
    return data
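# A minimal usage sketch (assumed caller code): the keyword returns the parsed
# JSON object, so nested values can be read directly. The file path and keys
# below are illustrative placeholders, not part of the library.
def example_read_timeout_setting(json_keywords):
    data = json_keywords.load_json_from_file('/path/to/file.json')
    # .get() avoids KeyError if the hypothetical keys are absent
    return data.get('settings', {}).get('timeout')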
def _open_csv_file_for_write(filename, data, csv_writer=csv.writer, **kwargs):
    with open(filename, 'ab') as csv_handler:
        writer = csv_writer(csv_handler, **kwargs)
        try:
            if isinstance(writer, csv.DictWriter) and 'fieldnames' in list(kwargs.keys()):
                csv_handler.truncate()
                writer.writeheader()
            writer.writerows(data)
        except csv.Error as e:
            logger.error('file %s, line %d: %s' % (filename, writer.line_num, e))
def connect_to_redis_cluster(redis_host, redis_port):  # pragma: no cover
    """Connect To Redis Cluster.

    Arguments:
        - redis_host: hostname or IP address of the Redis server.
        - redis_port: Redis port number (default=6379)

    Return redis connection object

    Examples:
    | ${redis_conn}= | Connect To Redis Cluster | redis-dev.com | 6379 |
    """
    startup_nodes = [{"host": redis_host, "port": redis_port}]
    try:
        redis_conn = redis.StrictRedisCluster(startup_nodes=startup_nodes,
                                              decode_responses=True,
                                              skip_full_coverage_check=True)
    except Exception as ex:
        logger.error(str(ex))
        raise Exception(str(ex))
    return redis_conn
def relleno_campo_texto_con_etiqueta_con_texto(self, etiqueta, texto):
    """Fills a text field, looking up its label by name."""
    try:
        # locate the label of the text box we are looking for
        form = self.driver.find_element_by_xpath('//form')
        label = form.find_element_by_xpath('//*[contains(@class, "ocultarUsuarios")]'
                                           '//strong[normalize-space(text())="' + etiqueta + '"]')
        # after locating the label of the text box, look for its closest sibling,
        # which is where the text box input is located
        textbox = label.find_element_by_xpath('./following::input')
        textbox.clear()
        textbox.send_keys(texto)
    except (TimeoutException, NoSuchElementException) as e:
        logger.error(FIELD_NOT_FOUND.format(etiqueta))
        raise Exception(e.msg)
def wait_for_success(self):
    """
    Summary: Waits for the success message in the UI.
    """
    try:
        myElem = WebDriverWait(self.driver, 5).until(
            EC.presence_of_element_located(
                (By.XPATH, 'id("content")/div[2]/article[1]/div[1]')))
        e = self.driver.find_element(
            By.XPATH, 'id("content")/div[2]/article[1]/div[1]')
        info = str(e.text)
        logger.console('{}'.format(info), newline=False)
    except:
        logger.error('Timed out waiting for the success message')
def check_vdlog(self):
    if not TBoxCore.is_connected():
        raise TBoxCoreError(
            "Exception on querying vdlog file (please insert the ADB cable)"
        )
    (status, output) = Utils.getstatusoutput("adb shell ls /data")
    try:
        if not status:
            vdlog = re.findall('vdlog', output)[0]
            logger.info(self._tag + "vdlog exists: True")
    except IndexError:
        logger.error(self._tag + "vdlog exists: False")
        logger.info(self._tag + "Creating vdlog file...")
        Utils.getstatusoutput('adb shell touch /data/vdlog')
        if not status:
            logger.info(self._tag + "Succeeded in creating vdlog file")
def end_suite(data, result):
    """Called at the end of a test suite."""
    logger.info('Start of end_suite listener')
    try:
        json_data = '['
        for item in SUITE_ANNOTATIONS:
            json_data = json_data + json.dumps(item.__dict__) + ','
        json_data = json_data.rstrip(',') + ']'
        robot_out = os.environ.get('ROBOTOUT')
        file_path = os.path.join(robot_out, ANNOTATIONS_FILE)
        with open(file_path, 'w') as json_file:
            json_file.write(json_data)
        logger.info('Annotations file created at: %s' % file_path)
    except Exception as e:
        logger.error('Unable to generate annotations file')
        logger.error(e)
def reset_simulator_reading_count():
    try:
        logger.info('▶ Reset simulator reading count.', also_console=True)
        conn = http.client.HTTPConnection(host=configuration.SIMULATOR_HOST,
                                          port=1503, timeout=10)
        conn.request(method="GET", url="/reading/count/reset")
        r = conn.getresponse()
    except Exception as e:
        logger.debug('▶ Failed to reset simulator reading count. {}'.format(e))
        raise e
    if int(r.status) == 200:
        logger.info('▶ Reset simulator reading count to 0', also_console=True)
    else:
        logger.error('▶ Failed to reset simulator reading count.')
        raise Exception('Failed to reset simulator reading count.')
def async_get(self, handle):
    """
    Blocks until the thread created by async_run returns.
    Use: Async Get    ${handle}
    """
    assert handle in self._thread_pool, 'Invalid async call handle'
    try:
        result = self._thread_pool[handle].result_queue.get(True, timeout=15)
    except Empty:
        logger.error("Async get keyword didn't retrieve result")
        return
    del self._thread_pool[handle]
    return result
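# A minimal usage sketch. The docstring above refers to a companion async_run
# keyword that starts the thread and returns the handle stored in
# self._thread_pool; its exact signature is an assumption here, and the keyword
# name and arguments below are illustrative only.
def example_async_round_trip(async_lib):
    handle = async_lib.async_run('Some Keyword', 'arg1')  # assumed companion keyword
    # ... do other work while the keyword runs in its own thread ...
    result = async_lib.async_get(handle)  # blocks up to 15 seconds for the result
    return result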
def manage_npmaudit_results(self, result_file):
    severity_dict = {'moderate': 2, 'low': 1, 'critical': 3, 'high': 3}
    url = "{}/api/vulnerability/create".format(self.threatplaybook)
    with open(result_file, 'r') as jfile:
        results = json.loads(jfile.read())
    if results:
        create_scan_query = self.create_scan("npmaudit")
        if 'data' in create_scan_query:
            scan = create_scan_query.get('data').get('name')
            if 'advisories' not in results:
                logger.info("No Advisories in report")
            else:
                for sl, vul_result in results.get('advisories').items():
                    vul_dict = {
                        'name': vul_result.get('title'),
                        'description': vul_result.get('overview', ''),
                        'scan': scan,
                        'cwe': int(vul_result.get('cwe').split('-')[1], 0),
                        'severity': int(severity_dict.get(vul_result.get('severity', 'low'))),
                    }
                    evidences = []
                    for finding in vul_result.get('findings'):
                        evid = {
                            "param": finding.get('version', '0.0'),
                            "url": ":".join(finding.get('paths'))
                        }
                        evidences.append(evid)
                    vul_dict['evidences'] = evidences
                    resp = requests.post(url, json=vul_dict,
                                         headers={"Authorization": self.token})
                    if resp.status_code != 200:
                        raise Exception("Unable to create NPM Audit Finding")
        else:
            logger.error("Unable to create scan for NPM Audit")
def get_attached_devices(self, band, iface_type):
    s = ""
    xpath = "table"
    if band == "wired":
        s = "Wired Devices"
        xpath = "div/" + xpath
    elif "2.4" in band:
        if iface_type == "guest":
            s = "2.4 GHz Guest Wireless Devices"
        else:
            s = "2.4 GHz Wireless Devices"
    elif "5" in band:
        if iface_type == "guest":
            s = "5 GHz Guest Wireless Devices"
        else:
            s = "5 GHz Wireless Devices"
    else:
        logger.error("Attached device for %s %s does not exist" % (band, iface_type))
        raise ValueError
    logger.debug("Checking attached devices\t on: %s" % (s))
    self._linkElement("Status")
    time.sleep(2)
    self._linkElement("Attached Devices")
    time.sleep(15)
    msg = self._captureWebInfo(
        "//h4[contains(.,'%s')]/following-sibling::%s/tbody" % (s, xpath))
    dl = msg.split("\n")
    list_info = []
    if len(dl) <= 1:
        return None
    else:
        del dl[0]
        for d in dl:
            logger.debug("device:%s." % d)
            di = d.split(" ")
            info = {
                'ip': str(di[1]),
                'mac': str(di[2]),
                'name': str(di[3]),
                'alias': str(di[4])
            }
            list_info.append(info)
    return list_info
def __init__(self, project_name, default_connection=True):
    '''
    Initialize the ThreatPlaybook API.

    By default, the param ``default_connection`` is set to True. This means that
    ThreatPlaybook will attempt to connect to the local MongoDB instance on DB
    ``threat_playbook`` with no authentication or other params.

    If you want to connect to a MongoDB instance with authentication and other
    params, you need to set the Environment Variables:
    ``TP_MONGO_USER`` => Mongo Username
    ``TP_MONGO_PASS`` => Mongo Password
    ``TP_MONGO_HOST`` => Mongo Host IP
    ``TP_MONGO_PORT`` => Mongo Port
    All these params have to be set if default_connection is initialized with `False`.

    | ThreatPlaybook | project_name | default_connection=True/False |
    '''
    try:
        if default_connection:
            logger.warn(
                "Running MongoDB without Authentication. Highly recommend "
                "running MongoDB with Authentication"
            )
            connect('threat_playbook')
        else:
            mongo_user = os.getenv("TP_MONGO_USER", None)
            mongo_pass = os.getenv("TP_MONGO_PASS", None)
            mongo_host = os.getenv("TP_MONGO_HOST", None)
            mongo_port = os.getenv("TP_MONGO_PORT", None)
            connect(db='threat_playbook', username=mongo_user, password=mongo_pass,
                    host=mongo_host, port=mongo_port)
    except:
        logger.error("Unable to connect to DB. Disconnecting")
        exit(1)
    Project.objects(name=project_name).update_one(name=project_name, upsert=True)
    self.project = Project.objects.get(name=project_name)
    new_session = Session(project=self.project)
    new_session.save()
    self.session = new_session
def create_env_directory_at_node(node):
    """Create fresh virtualenv to a directory, install pip requirements."""
    logger.console('Extracting virtualenv, installing requirements.txt '
                   'on {0}'.format(node['host']))
    ssh = SSH()
    ssh.connect(node)
    (ret_code, stdout, stderr) = ssh.exec_command(
        'cd {0} && rm -rf env && '
        'virtualenv --system-site-packages --never-download env && '
        '. env/bin/activate && '
        'pip install -r requirements.txt'.format(con.REMOTE_FW_DIR),
        timeout=100)
    if ret_code != 0:
        logger.error('Virtualenv creation error: {0}'.format(stdout + stderr))
        raise Exception('Virtualenv setup failed')
    else:
        logger.console('Virtualenv created on {0}'.format(node['host']))
def __JsonList(self, Real_data, Expect_data):
    try:
        logger.debug('Calling JsonList; actual response data: %s expected data: %s'
                     % (Real_data, Expect_data))
        if len(Real_data) < len(Expect_data):
            return '0', 'List Values are not equal! ' + 'Real_data: ' + str(Real_data) + \
                   ' Expect_data: ' + str(Expect_data)
        for single_Real_data in Real_data:
            single_Real_data_index = Real_data.index(single_Real_data)
            if single_Real_data_index > len(Expect_data) - 1:
                return '0', str(single_Real_data) + ' in Expect_data not found!'
            single_Expect_data = Expect_data[single_Real_data_index]  # expected data
            logger.debug('Actual response data %s' % single_Real_data)
            logger.debug('Expected data %s' % single_Expect_data)
            if single_Expect_data != single_Real_data:
                # If the dicts in the list differ, work out exactly where they differ
                if isinstance(single_Expect_data, dict) and isinstance(single_Real_data, dict):  # dict type
                    State, Info = self.__JsonDict(single_Real_data, single_Expect_data)
                    if State == '0':
                        logger.info('Data match failed. Details: %s' % Info)
                        return State, Info
                    continue
                elif isinstance(single_Expect_data, list) and isinstance(single_Real_data, list):  # list type
                    State, Info = self.__JsonList(single_Real_data, single_Expect_data)
                    if State == '0':
                        logger.info('Data match failed. Details: %s' % Info)
                        return State, Info
                    continue
                else:
                    # Compare plain types
                    return '0', 'Values are not equal! Real: ' + str(single_Real_data) + \
                           ' Expect: ' + str(single_Expect_data)
            elif single_Expect_data == single_Real_data:
                logger.debug('Match succeeded, continue with the next iteration')
                continue
            else:
                return '0', 'unknown error'
        return '1', None  # data correct
    except KeyError as e:
        logger.error('Exception occurred!!! Error info %s' % e)
        return '0', 'KeyError:' + str(e)
def update_device_service_admin_state(admin_state):
    conn = http.client.HTTPConnection(host=SettingsInfo().constant.BASE_URL,
                                      port=59881, timeout=5)
    conn.request(method="PUT",
                 url="/api/v1/deviceservice/name/{}/adminstate/{}".format(
                     configuration.SERVICE_NAME, admin_state))
    try:
        r1 = conn.getresponse()
    except Exception as e:
        raise e
    if int(r1.status) == 200:
        logger.debug('▶ Update device service admin state to {}'.format(admin_state))
    else:
        logger.error('▶ Failed to update the admin state.')
        raise Exception('Failed to update the admin state')
def execute_script(self, vat_name, node, timeout=15, json_out=True):
    """Copy the vat_name script to node, execute it and return result.

    :param vat_name: Name of the vat script file. Only the file name of
        the script is required, the resources path is prepended automatically.
    :param node: Node to execute the VAT script on.
    :param timeout: Seconds to allow the script to run.
    :param json_out: Require JSON output.
    :type vat_name: str
    :type node: dict
    :type timeout: int
    :type json_out: bool
    :returns: Status code, stdout and stderr of executed VAT script.
    :rtype: tuple
    :raises RuntimeError: If VAT script execution failed.
    """
    ssh = SSH()
    try:
        ssh.connect(node)
    except:
        raise SSHException("Cannot open SSH connection to execute VAT "
                           "command(s) from template {0}".format(vat_name))
    remote_file_path = '{0}/{1}/{2}'.format(Constants.REMOTE_FW_DIR,
                                            Constants.RESOURCES_TPL_VAT,
                                            vat_name)
    # TODO: this overwrites the output if the vat script has been used twice
    # remote_file_out = remote_file_path + ".out"
    cmd = "sudo -S {vat} {json} in {input} script".format(
        vat=Constants.VAT_BIN_NAME,
        json="json" if json_out is True else "",
        input=remote_file_path)
    try:
        (ret_code, stdout, stderr) = ssh.exec_command(cmd, timeout)
    except SSHTimeout:
        logger.error("VAT script execution timeout: {0}".format(cmd))
        raise
    except:
        raise RuntimeError("VAT script execution failed: {0}".format(cmd))
    self._ret_code = ret_code
    self._stdout = stdout
    self._stderr = stderr
def _acquire_value_set(self, *tags):
    if self._remotelib:
        try:
            while True:
                self._setname, self._valueset = self._remotelib.run_keyword(
                    'acquire_value_set', [self._my_id] + list(tags), {})
                if self._setname:
                    logger.info('Value set "%s" acquired' % self._setname)
                    return self._setname
                time.sleep(PabotLib._pollingSeconds)
                if PabotLib._polling_logging:
                    logger.debug('waiting for a value set')
        except RuntimeError as err:
            logger.error("RuntimeError caught in remote _acquire_value_set execution. "
                         "Maybe there is no connection - is pabot called with "
                         "--pabotlib option? ErrorDetails: {0}".format(repr(err)))
            self.__remotelib = None
            raise
    self._setname, self._valueset = _PabotLib.acquire_value_set(self, self._my_id, *tags)
    return self._setname
def acquire_lock(self, name):
    """
    Wait for a lock with name.
    [https://pabot.org/PabotLib.html?ref=log#acquire-lock|Open online docs.]
    """
    if self._remotelib:
        try:
            while not self._remotelib.run_keyword('acquire_lock',
                                                  [name, self._my_id], {}):
                time.sleep(PabotLib._pollingSeconds)
                if PabotLib._polling_logging:
                    logger.debug('waiting for lock to release')
            return True
        except RuntimeError as err:
            logger.error("RuntimeError caught in remote acquire_lock execution. "
                         "Maybe there is no connection - is pabot called with "
                         "--pabotlib option? ErrorDetails: {0}".format(repr(err)))
            self.__remotelib = None
            raise
    return _PabotLib.acquire_lock(self, name, self._my_id)
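# A minimal usage sketch (assumption: PabotLib also exposes a release_lock
# keyword, as in the upstream library). Pairing acquire/release in try/finally
# keeps the lock from leaking if the protected block fails; the lock name is
# illustrative only.
def example_critical_section(pabotlib):
    pabotlib.acquire_lock('shared-resource')
    try:
        pass  # do the work that must not run in parallel across pabot processes
    finally:
        pabotlib.release_lock('shared-resource')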
def _validate_mandatory_values(self):
    """Validate that mandatory profile values satisfy their constraints.

    :returns: whether mandatory values are acceptable
    :rtype: bool
    """
    # Level 3: Mandatory params: Check if urls is a list:
    is_valid = True
    if not isinstance(self.traffic_profile[u"urls"], list):
        logger.error(u"The parameter 'urls' must be a list.")
        is_valid = False
    # Level 3: Mandatory params: Check if integers are not below minimum
    for param, minimum in self.INTEGER_PARAMS:
        if not self._validate_int_param(param, minimum):
            is_valid = False
    return is_valid
def new_proto_object(self, proto_id, proto_name, message, dict_fields):
    global proto
    proto[proto_id] = eval(proto_name + '.' + message + '()')
    for field in dict_fields:
        # logger.error(type(dict_fields[field]))
        if isinstance(dict_fields[field], list):
            for i in range(len(dict_fields[field])):
                exec('proto[proto_id].' + field + '.add()')
                if isinstance(dict_fields[field][i], (dict, type(DotDict))):
                    for sub in dict_fields[field][i]:
                        logger.error('proto[proto_id].' + field + '[' + str(i) + '].' +
                                     sub + ' = ' + dict_fields[field][i][sub])
                        exec('proto[proto_id].' + field + '[' + str(i) + '].' +
                             sub + ' = ' + dict_fields[field][i][sub])
                else:
                    exec('proto[proto_id].' + field + '[' + str(i) + ']=' +
                         str(dict_fields[field][i]))
        else:
            exec('proto[proto_id].' + field + ' = ' + dict_fields[field])
    return proto[proto_id]
def _open_csv_file_for_read(filename, csv_reader=csv.reader, line_numbers=None, **kwargs):
    if line_numbers is not None and isinstance(line_numbers, list):
        line_numbers = list(map(int, line_numbers))
    with open(filename, 'r') as csv_handler:
        reader = csv_reader(csv_handler, **kwargs)
        try:
            for line_number, row in enumerate(reader):
                if line_numbers is None:
                    yield row
                elif isinstance(line_numbers, list):
                    if line_number in line_numbers:
                        yield row
                        line_numbers.remove(line_number)
                        if len(line_numbers) == 0:
                            break
        except csv.Error as e:
            logger.error('file %s, line %d: %s' % (filename, reader.line_num, e))
def raise_from(raising, excepted):
    """Function to be replaced by "raise from" in Python 3.

    Neither "six" nor "future" offer good enough implementation right now.
    chezsoi.org/lucas/blog/displaying-chained-exceptions-stacktraces-in-python-2

    Current implementation just logs the excepted error, and raises the new one.

    :param raising: The exception to raise.
    :param excepted: The exception we excepted and want to log.
    :type raising: BaseException
    :type excepted: BaseException
    :raises: raising
    """
    logger.error("Excepted: {exc!r}\nRaising: {rai!r}".format(
        exc=excepted, rai=raising))
    raise raising
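# A minimal usage sketch (assumed caller code): wrap a low-level error in a
# domain-specific one while still logging the original via raise_from. Assumes
# the json module is imported; the error wording is illustrative only.
def example_parse_config(text):
    try:
        return json.loads(text)
    except ValueError as err:
        # logs the original ValueError, then raises the RuntimeError
        raise_from(RuntimeError('Config file is not valid JSON'), err)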
def decrypt_and_extract_the_dump_file(src_dir, decryptor_path):
    logger.console('Extracting the support dump file')
    i = 1
    for filename in os.listdir(src_dir):
        while filename.endswith(".part"):
            logger.console("waiting %s seconds for the download to complete" % (i * 30))
            time.sleep(30)
            i = i + 1
            if i >= 240:
                logger.error("Took more than 2 hours to download, so failing the test case")
            if not os.path.isfile(src_dir + "//" + filename):
                logger.console("Download completed")
                break
    if platform.system() == 'Windows':
        decryptor_extractor(r"decrypt-support-dump.bat", src_dir, decryptor_path)
    else:  # platform.system() == 'Linux'
        decryptor_extractor(r"decrypt-support-dump.sh", src_dir, decryptor_path, "linux")
def samba_download(self, usb_id, download_file):
    director = urllib2.build_opener(SMBHandler)
    fh = None  # prevent reference before assignment
    try:
        fh = director.open('smb://*****:*****@192.168.8.1/usb-' + str(usb_id) +
                           '/' + download_file)
        # feng add 20170126
        time.sleep(10)
    except urllib2.URLError:
        logger.error("URLError in samba download, usb id: " + str(usb_id) +
                     ", download file: " + str(download_file) +
                     ". USB probably not detected correctly")
    if fh:
        print type(fh)
        save_file = open(download_file, 'w')
        save_file.write(fh.read())
        save_file.close()
        fh.close()
def upload_file_ftp(self, usb_id, filename, username, password):
    local_f = open(filename, 'rb')
    try:
        ftp = FTP(self.host_ip, username, password)
        ftp.cwd('usb-' + str(usb_id))
        ftp.storbinary('STOR %s' % filename, local_f)
    except ftplib.error_perm:
        logger.error("550 Failed to change directory, usb id: " + str(usb_id) +
                     " filename: " + str(filename) + " usr:pass: " + username + ":" + password +
                     ". USB probably not detected correctly")
        # -- feng add 20170202
        logger.console("550 Failed to change directory, usb id: " + str(usb_id) +
                       " filename: " + str(filename) + " usr:pass: " + username + ":" + password +
                       ". USB probably not detected correctly")
    ftp.close()