def create_session(self, alias, url, headers=None, cookies=None, auth=None, timeout=None, proxies=None, verify=False, max_retries=0):
    """ Create Session: create a HTTP session to a server

    `url` Base url of the server

    `alias` Robot Framework alias to identify the session

    `headers` Dictionary of default headers

    `cookies` Dictionary of default cookies

    `auth` List of username & password for HTTP Basic Auth

    `timeout` connection timeout

    `proxies` Dictionary that contains proxy urls for HTTP and HTTPS communication

    `verify` set to True if Requests should verify the certificate

    `max_retries` The maximum number of retries each connection should attempt.
    """
    # Fix: a mutable default argument ({}) is shared across all calls; use
    # None as the sentinel and build a fresh dict per call instead.
    if headers is None:
        headers = {}
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s ' % (alias, url, headers, cookies, auth, timeout, proxies, verify))
    return self._create_session(alias, url, headers, cookies, auth, timeout, proxies, verify, max_retries)
def reboot_vm_os(self, name):
    """Trigger a reboot of the guest OS inside the named VM.

    Returns immediately; the reboot completes asynchronously.
    """
    self._get_vm(name).reboot_guest()
    logger.info("VM %s reboot initiated." % name)
def revoke_company_permission(self, driver, group_label, group_id, company_label, company_id):
    '''Revoke (uncheck) a company permission checkbox.

    From the Design document the caller already knows group_label,
    group_id, company_label and company_id; these are used to locate
    the permission checkbox and uncheck it if it is currently checked.
    If the checkbox is not visible, its group is expanded first.
    '''
    def _uncheck(comp_elem):
        # Uncheck the company checkbox only if it is currently checked.
        if comp_elem.get_attribute('checked') is None:
            logger.warn("..The permission '%s' was NOT checked before. Continue without change"%company_label)
        else:
            time.sleep(1)
            comp_elem.click()
            logger.info("====Revoked the company permission '%s/%s'"%(group_label, company_label))

    group_elem_xpath = self.page.get_xpath(group_id, 'group')
    comp_elem_xpath = self.page.get_xpath(company_id, 'single')
    try:
        _uncheck(self.page.find_element_by_xpath(driver, comp_elem_xpath))
    except (NoSuchElementException, ElementNotVisibleException):
        # The company checkbox is hidden until its group is expanded:
        # click the group first, then retry once.  (Fix: the original
        # duplicated the whole uncheck block in both branches.)
        self.page.find_element_by_xpath(driver, group_elem_xpath).click()
        _uncheck(self.page.find_element_by_xpath(driver, comp_elem_xpath))
def navigate_to_add_user_page(self, driver):
    """Navigate the browser directly to the 'add user profile' page."""
    ext_url = r"/servlet/UserSelect?action=add_user_profile"
    full_url = self.page.get_base_url(driver) + ext_url
    time.sleep(10)  # fixed wait for the previous page to settle
    # Fix: the old log message said "add company page", but this keyword
    # opens the add *user* profile page (see ext_url above).
    logger.info("Go directly to add user page")
    driver.get(full_url)
def shutdown_vm_os(self, name):
    """Trigger a shutdown of the guest OS inside the named VM.

    Returns immediately; the shutdown completes asynchronously.
    """
    self._get_vm(name).shutdown_guest()
    logger.info("VM %s shutdown initiated." % name)
def capture_screenshot(self, filename=None):
    ''' Captures a screenshot of the current screen and embeds it in the test report

    Also works in headless environments.

    This keyword might fail if the instrumentation backend is unable to make a
    screenshot at the given point in time. If you want to try again making a
    screenshot, you want to wrap this like

    | Wait Until Keyword Succeeds | 20 seconds | 5 seconds | Capture Screenshot |

    If you don't care about the screenshot (as it is a nice-to-have addition to
    your test report but not neccessary), use it like this:

    | Run Keyword And Ignore Error | Capture Screenshot |

    `filename` Location where the screenshot will be saved (optional).
    '''
    path, link = self._get_screenshot_paths(filename)
    response = self._request("get", urljoin(self._url, 'screenshot'))
    if response.status_code == 500:
        raise AssertionError("Unable to make a screenshot, see documentation on how to handle this")
    assert response.status_code == 200, "InstrumentationBackend sent status %d, expected 200" % response.status_code
    # Fix: response.content is binary PNG data, so open in 'wb' (text mode
    # corrupts it on Python 3/Windows); the context manager closes the file,
    # so the redundant explicit close() is gone.
    with open(path, 'wb') as f:
        f.write(response.content)
    logger.info('</td></tr><tr><td colspan="3"><a href="%s">'
                '<img src="%s"></a>' % (link, link), True, False)
def report_sauce_status(name, status, tags=None, remote_url=''):
    """Report the test name, pass/fail status and tags to Sauce Labs for the
    current Selenium job, and log a link to the job video when available.
    """
    # Fix: a mutable default argument ([]) is shared across calls.
    if tags is None:
        tags = []
    # Parse username and access_key from the remote_url
    assert USERNAME_ACCESS_KEY.match(remote_url), 'Incomplete remote_url.'
    username, access_key = USERNAME_ACCESS_KEY.findall(remote_url)[0][1:]
    # Get selenium session id from the keyword library
    selenium = BuiltIn().get_library_instance('Selenium2Library')
    job_id = selenium._current_browser().session_id
    # Prepare payload and headers.
    # NOTE(review): str.encode('base64') is a Python 2 only codec.
    token = (':'.join([username, access_key])).encode('base64').strip()
    payload = {'name': name, 'passed': status == 'PASS', 'tags': tags}
    headers = {'Authorization': 'Basic {0}'.format(token)}
    # Put test status to Sauce Labs
    url = 'https://saucelabs.com/rest/v1/{0}/jobs/{1}'.format(username, job_id)
    response = requests.put(url, data=json.dumps(payload), headers=headers)
    assert response.status_code == 200, response.text
    # Log video url from the response
    video_url = json.loads(response.text).get('video_url')
    if video_url:
        logger.info('<a href="{0}">video.flv</a>'.format(video_url), html=True)
def _get_stream_from_pcapfile(self,filename): '''read pcap file and return bytes stream''' if not os.path.isfile(filename): logger.info('%s is not a file' % filename) raise AssertionError('%s is not file or path error' % filename) with open(filename,'rb') as handle: return handle.read()
def get_all_stats(path):
    """Read the statistics line from the output file at `path` and return the
    parsed (total, tags, suite) statistics.
    """
    logger.info('Getting stats from <a href="file://%s">%s</a>' % (path, path), html=True)
    stats_line = _get_stats_line(path)
    logger.debug('Stats line: %s' % stats_line)
    # NOTE(review): eval() executes arbitrary code from the stats file; this is
    # only safe if the file is fully trusted.  Consider ast.literal_eval —
    # TODO confirm the stats line is always a plain Python literal.
    total, tags, suite = eval(stats_line)
    return total, tags, suite
def get_value_from_string(self, string, regexp):
    '''KeyWord: Get Value From String — extract captured groups from
    `string` using the regular expression `regexp`.

    args:
        - string: text to search
        - regexp: regular expression with capture groups
    return:
        - None when the inputs are empty or nothing matched
        - the single captured value when the pattern has exactly one group,
          otherwise the tuple of all captured groups
    examples:
        | Get Value From String | $output | '.*?,\\s+address is (.*?),\\s+' |
    '''
    if not string:
        return None
    if not regexp:
        logger.info('regexp is None,please check')
        return None
    match = re.compile(regexp).search(string)
    if not match:
        logger.info('not search %r in %s' % (regexp, string))
        return None
    groups = match.groups()
    return groups[0] if len(groups) == 1 else groups
def find_all_from_string(self, string, regexp):
    '''KeyWord: Find All From String — find every substring of `string`
    matching the regular expression `regexp`.

    args:
        - string: text to search
        - regexp: regular expression
    return:
        - None when the inputs are empty or nothing was found
        - list of all matches (tuples of groups when the pattern has
          several capture groups)
    examples:
        | Find All From String | $output | '.*?,\\s+address is (.*?),\\s+' |
    '''
    # Fix: guard against empty/None string, consistent with
    # Get Value From String; re.findall(pattern, None) raises TypeError.
    if not string:
        return None
    if not regexp:
        logger.info('regexp is None,please check')
        return None
    research = re.compile(regexp)
    m = research.findall(string)
    if m:
        return m
    else:
        logger.info('not find %r in %s' % (regexp, string))
        return None
def unregister(self, strategy_name):
    """Remove a previously registered strategy by name.

    Default strategies cannot be removed (raises AttributeError);
    unregistering an unknown strategy is only logged, not an error.
    """
    if strategy_name in self._default_strategies:
        raise AttributeError("Cannot unregister the default strategy '" + strategy_name + "'")
    if strategy_name in self._strategies:
        del self._strategies[strategy_name]
    else:
        logger.info("Cannot unregister the non-registered strategy '" + strategy_name + "'")
def install_openafs(self):
    """Install the OpenAFS client and server binaries.

    Honours the DO_INSTALL variable (skips entirely when False) and
    dispatches on AFS_DIST: 'transarc' tarball-style installs, or
    'rhel6'/'suse' RPM installs.  Raises AssertionError for an
    unsupported operating system or distribution.
    """
    # NOTE: '== False' (not 'is False') also matches 0 — presumably the
    # variable is a real boolean; confirm before tightening.
    if get_var('DO_INSTALL') == False:
        logger.info("Skipping install: DO_INSTALL is False")
        return
    uname = os.uname()[0]
    dist = get_var('AFS_DIST')
    if dist == "transarc":
        # Transarc-style install: optionally untar a pre-built tarball,
        # then install each component via Robot keywords.
        if get_var('TRANSARC_TARBALL'):
            run_keyword("Untar Binaries")
        run_keyword("Install Server Binaries")
        run_keyword("Install Client Binaries")
        run_keyword("Install Workstation Binaries")
        run_keyword("Install Shared Libraries")
        # Init scripts are platform specific.
        if uname == "Linux":
            run_keyword("Install Init Script on Linux")
        elif uname == "SunOS":
            run_keyword("Install Init Script on Solaris")
        else:
            raise AssertionError("Unsupported operating system: %s" % (uname))
    elif dist in ('rhel6', 'suse'):
        # RPM-based install: server packages, then client packages.
        rpm = Rpm.current()
        run_keyword("Install RPM Files", *rpm.get_server_rpms())
        run_keyword("Install RPM Files", *rpm.get_client_rpms())
    else:
        raise AssertionError("Unsupported AFS_DIST: %s" % (dist))
def split_to_lines(self, string, start=0, end=None):
    """Converts the `string` into a list of lines.

    Optionally returns only the slice of lines from `start` (inclusive)
    to `end` (exclusive).  Line numbering starts from 0 and negative
    indices count from the end.  Newlines are stripped from the returned
    lines, and the number of lines returned is logged.

    Examples:
    | @{lines} =        | Split To Lines | ${manylines} |    |    |
    | @{ignore first} = | Split To Lines | ${manylines} | 1  |    |
    | @{ignore last} =  | Split To Lines | ${manylines} |    | -1 |
    | @{5th to 10th} =  | Split To Lines | ${manylines} | 4  | 10 |
    | @{first two} =    | Split To Lines | ${manylines} |    | 1  |
    | @{last two} =     | Split To Lines | ${manylines} | -2 |    |

    Use `Get Line` if you only need to get a single line.
    """
    begin_index = self._convert_to_index(start, 'start')
    end_index = self._convert_to_index(end, 'end')
    selected = string.splitlines()[begin_index:end_index]
    logger.info('%d lines returned' % len(selected))
    return selected
def start_process(self, command, *arguments, **configuration):
    """Starts a new process on background.

    See `Specifying command and arguments` and `Process configuration`
    for more information about the arguments.

    Makes the started process new `active process`. Returns an identifier
    that can be used as a handle to active the started process if needed.
    """
    # Build the effective configuration, then the final command line
    # (shell vs. argument-list form is decided by config.shell).
    config = ProcessConfig(**configuration)
    executable_command = self._cmd(arguments, command, config.shell)
    logger.info('Starting process:\n%s' % executable_command)
    logger.debug('Process configuration:\n%s' % config)
    process = subprocess.Popen(executable_command,
                               stdout=config.stdout_stream,
                               stderr=config.stderr_stream,
                               stdin=subprocess.PIPE,
                               shell=config.shell,
                               cwd=config.cwd,
                               env=config.env,
                               universal_newlines=True)
    # Remember the result object (so stdout/stderr can be read later),
    # then register the process and make it the active one.
    self._results[process] = ExecutionResult(process, config.stdout_stream, config.stderr_stream)
    return self._processes.register(process, alias=config.alias)
def web_confirm_alert_ok(self):
    """Accept (click OK on) the currently displayed confirmation alert.

    | Web Confirm Alert Ok | |
    """
    # Log message (Chinese): "Click the OK button of the alert box".
    logger.info("点击Alert框确定按钮")
    self.confirm_action()
def collect_lua_coverage():
    """Merges ${TMPDIR}/*.luacov.stats.out into luacov.stats.out

    Example:
    | Collect Lua Coverage |
    """
    tmp_dir = BuiltIn().get_variable_value("${TMPDIR}")
    merged = {}
    merged_files = []
    for stats_path in glob.iglob("%s/*.luacov.stats.out" % tmp_dir):
        _merge_luacov_stats(stats_path, merged)
        merged_files.append(stats_path)
    if not merged_files:
        logger.info("no *.luacov.stats.out files found in %s" % tmp_dir)
        return
    # Fold any pre-existing aggregate file into the result before rewriting it.
    if os.path.isfile(LUA_STATSFILE):
        _merge_luacov_stats(LUA_STATSFILE, merged)
    _dump_luacov_stats(LUA_STATSFILE, merged)
    logger.info("%s merged into %s" % (", ".join(merged_files), LUA_STATSFILE))
def get_request(self, alias, uri, headers=None, params=None, allow_redirects=None, timeout=None):
    """ Send a GET request on the session object found using the given `alias`

    `alias` that will be used to identify the Session object in the cache

    `uri` to send the GET request to

    `params` url parameters to append to the uri

    `headers` a dictionary of headers to use with the request

    `timeout` connection timeout
    """
    session = self._cache.switch(alias)
    # Redirects are followed unless the caller explicitly disables them.
    follow_redirects = allow_redirects if allow_redirects is not None else True
    result = self._get_request(session, uri, params, headers, follow_redirects, timeout)
    logger.info('Get Request using : alias=%s, uri=%s, headers=%s ' % (alias, uri, headers))
    return result
def delete_request(self, alias, uri, data=(), params=None, headers=None, allow_redirects=None, timeout=None):
    """ Send a DELETE request on the session object found using the given `alias`

    `alias` that will be used to identify the Session object in the cache

    `uri` to send the DELETE request to

    `data` optional request body, urlencoded before sending

    `params` url parameters to append to the uri

    `headers` a dictionary of headers to use with the request

    `timeout` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    # Redirects are followed unless the caller explicitly disables them.
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(session, uri, data, params, headers, redir, timeout)
    # NOTE(review): str.decode('utf-8') exists only on Python 2 byte strings;
    # on Python 3 this branch would raise AttributeError — the module appears
    # to target Python 2.  The decode only affects the logged value.
    if isinstance(data, str):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def Validate_perm_in_LDAP(self, user_perm):
    """Assert that the given permission exists in LDAP.

    Raises AssertionError when Check_ldap_perm reports it as missing.
    """
    if self.Check_ldap_perm(user_perm):
        msg = "Permission %s exists in LDAP" % user_perm
        # NOTE: Python 2 print statement; this module targets Python 2.
        print msg
        logger.info(msg)
    else:
        raise AssertionError("Permission %s does not exist in LDAP" % user_perm)
def options_request(self, alias, uri, headers=None, allow_redirects=None, timeout=None):
    """ Send an OPTIONS request on the session object found using the given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Redirects are followed unless the caller explicitly disables them.
    follow_redirects = allow_redirects if allow_redirects is not None else True
    result = self._options_request(session, uri, headers, follow_redirects, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s '
        % (alias, uri, headers, follow_redirects))
    return result
def shenickXmlSamplingIntervalChange(self, sourceXmlFile, samplingInterval):
    """ This procedure will modify an existing Shenick (XML) file using the desired Sampling Interval.

    *Parameters* :
    - sourceXmlFile : <string> ; XML file name (with file path, without the '.xml' suffix) of script which will be modified
    - samplingInterval : <string> ; Desired sampling interval. Please use 30sec, 1min, or 5min

    *Returns* : None
    """
    # Map the user-facing interval names onto Shenick XML wording
    # (dict lookup replaces the old if/elif chain).
    interval_names = {'30sec': 'Thirty Seconds',
                      '1min': 'One Minute',
                      '5min': 'Five Minutes'}
    try:
        sampleInterval = interval_names[str(samplingInterval)]
    except KeyError:
        raise AssertionError('Invalid sampling interval entered. Please use 30sec, 1min, or 5min.')
    # Set up source file for parsing and modification
    xmlFile = sourceXmlFile + '.xml'
    # Search through XML file for desired element and replace value with given sampling interval
    tree = ET.parse(xmlFile)
    for elem in tree.findall('.//tce_normal_stats_sample_interval'):
        elem.text = sampleInterval
    # Write XML 'tree' back to the XML file and inform user of success status
    try:
        tree.write(xmlFile)
        logger.info('XML file was successfully modified and rewritten', False, True)
    except Exception:
        # Fix: narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are not swallowed and converted into an AssertionError.
        raise AssertionError('XML file was not successfully modified and rewritten')
def _run_keyword_in_java_application(self, name, args, **kwargs):
    """Run keyword `name` in the remote Java application listening on
    localhost:self._port, converting all positional args to strings.

    On failure, optionally captures a screenshot through the remote library
    directly (bypassing this wrapper to avoid infinite recursion), logs a
    clickable link to it, and re-raises the original exception.
    """
    remote_url = "http://127.0.0.1:{0}".format(self._port)
    remote_java_app = Remote(remote_url)
    try:
        # Convert all the arguments to strings
        string_arguments = []
        for argument in args:
            string_arguments.append(str(argument))
        # The interface of the Remote library changed in robot framework 2.8.3
        # to take an additional dictionary for keyword arguments.
        # NOTE(review): lexicographic string comparison misorders versions
        # such as '2.10' vs '2.8.3' — TODO confirm the intended version range.
        if robot.version.get_version() >= '2.8.3':
            return_value = remote_java_app.run_keyword(name, string_arguments, kwargs)
        else:
            return_value = remote_java_app.run_keyword(name, string_arguments)
    except Exception as inst:
        # Take a screenshot, if we need to
        if self._take_screenshot_on_failure:
            output_directory = BuiltIn().replace_variables('${OUTPUTDIR}')
            # Find the first unused screenshot_java_NNNN.png file name.
            screenshot_file_name = 'screenshot_java_0000.png'
            index = 0
            while os.path.exists(os.path.join(output_directory, screenshot_file_name)):
                index += 1
                screenshot_file_name = 'screenshot_java_{0:04d}.png'.format(index)
            # Use the remote library directly to avoid infinite loops.
            # Same Remote-library interface split as above (>= 2.8.3).
            if robot.version.get_version() >= '2.8.3':
                remote_java_app.run_keyword("takeScreenshot", [os.path.join(output_directory, screenshot_file_name)], {})
            else:
                remote_java_app.run_keyword("takeScreenshot", [os.path.join(output_directory, screenshot_file_name)])
            # Log link to screenshot in an anchor to make the screenshot clickable for a bigger version
            logger.info('<a href="{0}"><img src="{0}" width="600px" /></a>'.format(
                screenshot_file_name.replace("\\", "/")), html=True)
        # Raise exception back to robot framework
        raise
    return return_value
def _log_message(self, message):
    """Log a message dict ({'message': ..., 'html': 'yes'|...}) together with
    a screenshot embedded inline as a base64 <img>, when one can be taken.

    Screenshot failures are deliberately ignored (best-effort logging).
    """
    msg = message.get('message')
    html = message.get('html') == 'yes'
    png = None
    try:
        png = self.ui_driver.take_screenshot()
        if self.no_image_file and html:
            encoded_base64 = base64.b64encode(png)
            # Embed the PNG inline; the base64 payload is sliced into 40-byte
            # chunks and re-joined before decoding to ASCII for the data URI.
            logger.info(
                '<img alt="{}" width="{}" height="{}" '
                'src="data:image/png;base64,{}" />'.format(
                    msg, *self.image_size, b''.join(
                        encoded_base64[index * 40:(index + 1) * 40]
                        for index in range(
                            int((len(encoded_base64) / 40)) + 1
                        )
                    ).decode('ascii')
                ),
                html=True
            )
    except:
        pass  # Ignore exceptions while taking screenshots (best effort)
    finally:
        # Drop the (potentially large) screenshot bytes promptly.
        if png:
            del png
def _run_command_with_java_tool_options(self, alias, command, webstart_main_class_regexp):
    """Run `command` with JAVA_TOOL_OPTIONS pointing at the java agent,
    restoring the caller's JAVA_TOOL_OPTIONS afterwards.
    """
    orig_java_tool_options = _get_java_tool_options()
    tool_options = self.get_java_agent_option(webstart_main_class_regexp)
    logger.info('Settings JAVA_TOOL_OPTIONS={0}'.format(tool_options))
    os.environ['JAVA_TOOL_OPTIONS'] = tool_options
    try:
        _start(alias, command)
    finally:
        # Fix: restore the environment even when _start raises, so a failed
        # launch does not leak the agent options into later commands.
        os.environ['JAVA_TOOL_OPTIONS'] = orig_java_tool_options
def select_email_by_sender(self, sender_name):
    """Tick the checkbox of every mail row whose sender text equals
    `sender_name`.

    The row list is re-fetched after each selection — presumably because
    clicking the checkbox re-renders the list and stales the elements;
    TODO confirm against the page behaviour.
    """
    locator = self.elements['mail_rows']
    my_rows = self._selenium2Library.element_find(locator, False, False, None)
    logger.info(str(len(my_rows)))
    length = len(my_rows)
    # NOTE: Python 2 print statement (debug output).
    print my_rows
    for i in range(len(my_rows)):
        row = my_rows[i]
        self._selenium2Library.wait_until_element_is_displayed(row, self._selenium2Library.timeout)
        # The sender cell is the div whose class starts with 'from'.
        element_sender = row.find_element(By.XPATH, ".//div[starts-with(@class,'from')]")
        self._selenium2Library.wait_until_element_is_displayed(element_sender, self._selenium2Library.timeout)
        current_sender_name = element_sender.text.strip()
        if current_sender_name == sender_name:
            chk_select = row.find_element(By.XPATH, ".//input[@role='checkbox']")
            if not chk_select.is_selected():
                chk_select.click()
            # Refresh the rows list
            my_rows = self._selenium2Library.element_find(locator, False, False, None)
def compare_screenshot_to_base(baseline, diff=100):
    """Calculate the exact difference between two images.

    :param string baseline: [required] base screenshot to compare
    :param int diff: value of maximum difference (RMS threshold)

    Example::

        Compare screenshot to base    base_screenshot.jpg
    """
    path = _get_screenshot()
    current_browser = _get_browser()
    # Selenium drivers expose one of two screenshot APIs depending on version.
    if hasattr(current_browser, 'get_screenshot_as_file'):
        current_browser.get_screenshot_as_file(path)
    else:
        current_browser.save_screenshot(path)
    img1 = Iopen(path)
    img2 = Iopen(baseline)
    his1 = img1.histogram()
    his2 = img2.histogram()
    # Root-mean-square difference between the two histograms.
    # NOTE(review): `imap` and bare `reduce` are Python 2 only.
    sqrtdiff = lambda a, b: (a - b) ** 2
    rms = sqrt(reduce(add, imap(sqrtdiff, his1, his2)) / len(his1))
    logger.info("RMS diff: %s" % rms)
    if rms > 0:
        # Save a visual diff image next to the screenshot for debugging.
        idiff = difference(img1, img2)
        path = path.replace(".png", ".jpg")
        idiff.save(path)
        logger.info("diff image: %s" % path)
    if rms > diff:
        raise AssertionError(
            "Image: %s is different from baseline: %s" % (path, baseline))
def open_connection(self, host, alias=None, port=23, timeout=None, newline=None, prompt=None, prompt_is_regexp=False, encoding=None, encoding_errors=None, default_log_level=None):
    """Opens a new Telnet connection to the given host and port.

    The `timeout`, `newline`, `prompt`, `prompt_is_regexp`, `encoding`,
    and `default_log_level` arguments get default values when the library
    is [#Importing|imported]. Setting them here overrides those values for
    the opened connection. See `Configuration` section for more information.

    Possible already opened connections are cached and it is possible to
    switch back to them using `Switch Connection` keyword. It is possible
    to switch either using explicitly given `alias` or using index returned
    by this keyword. Indexing starts from 1 and is reset back to it by
    `Close All Connections` keyword.
    """
    # Fall back to library-level defaults for anything the caller left unset.
    timeout = timeout or self._timeout
    newline = newline or self._newline
    encoding = encoding or self._encoding
    encoding_errors = encoding_errors or self._encoding_errors
    default_log_level = default_log_level or self._default_log_level
    if not prompt:
        prompt, prompt_is_regexp = self._prompt
    logger.info('Opening connection to %s:%s with prompt: %s' % (host, port, prompt))
    self._conn = self._get_connection(host, port, timeout, newline, prompt, prompt_is_regexp, encoding, encoding_errors, default_log_level)
    # Register in the cache so Switch Connection can return to it.
    return self._cache.register(self._conn, alias)
def refresh_page_until_email_by_subject_exist(self, subject, timeout):
    """Reload the page until an email row whose (whitespace-normalized)
    subject equals `subject` is displayed, or `timeout` expires."""
    logger.info(subject, True, True)
    subject_locator = "xpath=//span[normalize-space(@title)='%s']" % subject
    self._selenium2Library.reload_page_until_element_displayed(subject_locator, timeout)
def disconnect_from_database(self):
    """
    Disconnects from database.

    *Arguments:*
    - None

    *Return:*
    - None

    *Examples:*
    | Disconnect From Database |
    """
    if self._connectionCache.current:
        self._connectionCache.current.close()
        curIndex = self._connectionCache.current_index
        # Remove the alias entry that pointed at the connection being
        # closed (pokes into the cache's private _aliases mapping).
        aliasesCache = self._connectionCache._aliases
        if curIndex in aliasesCache.values():
            # NOTE(review): indexing .keys()/.values() works on Python 2
            # lists; on Python 3 these are views and this would raise.
            keyForDeletion = \
                aliasesCache.keys()[aliasesCache.values().index(curIndex)]
            del self._connectionCache._aliases[keyForDeletion]
        self._connectionCache.current = self._connectionCache._no_current
        logger.info("Current database was disconnected.")
        # Reset current_index only when the cache exposes it as a settable
        # property (differs between Robot Framework versions).
        cls_attr = getattr(type(self._connectionCache), 'current_index', None)
        if isinstance(cls_attr, property) and cls_attr.fset is not None:
            self._connectionCache.current_index = None
def enterGoogle():
    """Type a fixed value into the element located by the 'Title' test-data
    xpath and store a 'Deal' entry in the test data."""
    browser = get_webdriver_instance()
    title_field = browser.find_element_by_xpath(Config.get_test_data("Title"))
    title_field.send_keys("vhjjj")
    logger.info("vinothl gopal")
    Config.set_test_data("Deal", "Sreelakshmi")
def info(msg, html=False):
    """Log `msg` at INFO level via Robot Framework's logger.

    `html` — when True the message is rendered as HTML in the log.
    """
    logger.info(msg, html)
def _geckodriver_log(self):
    """Return the path used for the geckodriver log file.

    The location is not configurable: the log always goes to the library's
    log directory with a per-run '{index}' placeholder resolved by
    self._get_log_path.
    """
    log_file = self._get_log_path(
        os.path.join(self.log_dir, 'geckodriver-{index}.log'))
    # Fix: log message contained a duplicated word ("forced to to").
    logger.info('Firefox driver log is always forced to: %s' % log_file)
    return log_file
def execute_sql_script(self, sqlScriptFileName, sansTran=False):
    """
    Executes the content of the `sqlScriptFileName` as SQL commands.
    Useful for setting the database to a known state before running your
    tests, or clearing out your test data after running each a test.

    Set optional input `sansTran` to True to run command without an explicit
    transaction commit or rollback.

    SQL commands are expected to be delimited by a semi-colon (';'); the
    last command may omit its trailing semi-colon, and commands may span
    several lines. Lines starting with a number sign (`#`) or `--` are
    treated as comments and skipped.

    Sample usage :
    | Execute Sql Script | ${EXECDIR}${/}resources${/}DDL-setup.sql |
    | Execute Sql Script | ${EXECDIR}${/}resources${/}DML-setup.sql |
    | #interesting stuff here |
    | Execute Sql Script | ${EXECDIR}${/}resources${/}DML-teardown.sql |
    | Execute Sql Script | ${EXECDIR}${/}resources${/}DDL-teardown.sql |

    Using optional `sansTran` to run command without an explicit transaction
    commit or rollback:
    | Execute Sql Script | ${EXECDIR}${/}resources${/}DDL-setup.sql | True |
    """
    cur = None
    # Fix: open the script inside a context manager so the file handle is
    # closed even when execution raises (the original never closed it).
    with open(sqlScriptFileName) as sqlScriptFile:
        try:
            cur = self._dbconnection.cursor()
            logger.info('Executing : Execute SQL Script | %s ' % sqlScriptFileName)
            sqlStatement = ''
            for line in sqlScriptFile:
                line = line.strip()
                # '#' and '--' lines are comments and are skipped entirely.
                if line.startswith('#'):
                    continue
                elif line.startswith('--'):
                    continue
                sqlFragments = line.split(';')
                if len(sqlFragments) == 1:
                    # No ';' on this line: the statement continues.
                    sqlStatement += line + ' '
                else:
                    # Execute each complete ';'-terminated statement.
                    for sqlFragment in sqlFragments:
                        sqlFragment = sqlFragment.strip()
                        if len(sqlFragment) == 0:
                            continue
                        sqlStatement += sqlFragment + ' '
                        self.__execute_sql(cur, sqlStatement)
                        sqlStatement = ''
            # Execute a possible trailing statement without a ';'.
            sqlStatement = sqlStatement.strip()
            if len(sqlStatement) != 0:
                self.__execute_sql(cur, sqlStatement)
            if not sansTran:
                self._dbconnection.commit()
        finally:
            # Roll back any uncommitted work on failure (no-op after commit).
            if cur:
                if not sansTran:
                    self._dbconnection.rollback()
def Find_Window(self, **kwargs):
    """
    **Finds pywinauto window**

    :param kwargs: auto_id, class_name, class_name_re, title, title_re, control_type
    --------------
    :Example:
        | ${window}= Find window title=File
        | ${var}= Call Method ${window} click_input
    """
    windows = self.find_connected_app_windows()
    # Optional per-call timeout; defaults to 10 seconds.
    timeout = 10
    if "timeout" in kwargs:
        timeout = kwargs["timeout"]
        del(kwargs["timeout"])
    for window in windows:
        try:
            # Walk an optional chain of parent specifications first.
            if "parent" in kwargs:
                if type(kwargs["parent"]) == list:
                    for parent in kwargs["parent"]:
                        if "text" in parent:
                            logger.info('Finding window %s.' % parent)
                            window = window[(parent["text"])]
                            window.wait('exists', timeout=timeout)
                            pass
                        else:
                            logger.info('Finding window %s.' % parent)
                            window = window.window(**parent)
                            window.wait('exists', timeout=timeout)
                else:
                    logger.info('Finding window %s.' % kwargs["parent"])
                    window = window.window(**kwargs["parent"])
                    window.wait('exists', timeout=timeout)
                # NOTE(review): deleting 'parent' mutates kwargs, so any
                # later iteration over `windows` will not re-apply it —
                # confirm this is intended.
                del (kwargs["parent"])
            if "text" in kwargs:
                logger.info('Finding window %s.' % kwargs["text"])
                window = window[kwargs["text"]]
                window.wait('exists', timeout=timeout)
            else:
                logger.info('Finding window %s.' % kwargs)
                window = window.window(**kwargs)
                window.wait('exists', timeout=timeout)
            window.wait('ready', timeout=timeout)
            window.wait('active', timeout=timeout)
            return window
        except Exception as e:
            # Remember and log the failure, then try the next window.
            error = e
            logger.info(str(e))
    # NOTE(review): if `windows` is empty, `error` was never assigned and
    # this raises NameError instead of the intended lookup failure.
    raise error
def install_vpp_on_all_duts(nodes, vpp_pkg_dir, vpp_rpm_pkgs, vpp_deb_pkgs):
    """Install VPP on all DUT nodes.

    :param nodes: Nodes in the topology.
    :param vpp_pkg_dir: Path to directory where VPP packages are stored.
    :param vpp_rpm_pkgs: List of VPP rpm packages to be installed.
    :param vpp_deb_pkgs: List of VPP deb packages to be installed.
    :type nodes: dict
    :type vpp_pkg_dir: str
    :type vpp_rpm_pkgs: list
    :type vpp_deb_pkgs: list
    :raises RuntimeError: If failed to remove or install VPP.
    """
    logger.debug("Installing VPP")
    for node in nodes.values():
        if node['type'] == NodeType.DUT:
            logger.debug("Installing VPP on node {0}".format(node['host']))
            ssh = SSH()
            ssh.connect(node)
            # Detect RPM-based (RedHat) vs deb-based systems.
            cmd = "[[ -f /etc/redhat-release ]]"
            return_code, _, _ = ssh.exec_command(cmd)
            if int(return_code) == 0:
                # workaroud - uninstall existing vpp installation until
                # start-testcase script is updated on all virl servers
                rpm_pkgs_remove = "vpp*"
                cmd_u = 'yum -y remove "{0}"'.format(rpm_pkgs_remove)
                r_rcode, _, r_err = ssh.exec_command_sudo(cmd_u, timeout=90)
                if int(r_rcode) != 0:
                    raise RuntimeError(
                        'Failed to remove previous VPP'
                        'installation on host {0}:\n{1}'.format(
                            node['host'], r_err))
                # Build "dir/pkgA*.rpm dir/pkgB*.rpm ..." argument string.
                rpm_pkgs = "*.rpm ".join(
                    str(vpp_pkg_dir + pkg) for pkg in vpp_rpm_pkgs) + "*.rpm"
                cmd_i = "rpm -ivh {0}".format(rpm_pkgs)
                ret_code, _, err = ssh.exec_command_sudo(cmd_i, timeout=90)
                if int(ret_code) != 0:
                    raise RuntimeError('Failed to install VPP on host {0}:'
                                       '\n{1}'.format(node['host'], err))
                else:
                    ssh.exec_command_sudo("rpm -qai vpp*")
                    logger.info("VPP installed on node {0}".format(
                        node['host']))
            else:
                # workaroud - uninstall existing vpp installation until
                # start-testcase script is updated on all virl servers
                deb_pkgs_remove = "vpp*"
                cmd_u = 'apt-get purge -y "{0}"'.format(deb_pkgs_remove)
                r_rcode, _, r_err = ssh.exec_command_sudo(cmd_u, timeout=90)
                if int(r_rcode) != 0:
                    raise RuntimeError(
                        'Failed to remove previous VPP'
                        'installation on host {0}:\n{1}'.format(
                            node['host'], r_err))
                # Build "dir/pkgA*.deb dir/pkgB*.deb ..." argument string.
                deb_pkgs = "*.deb ".join(
                    str(vpp_pkg_dir + pkg) for pkg in vpp_deb_pkgs) + "*.deb"
                cmd_i = "dpkg -i --force-all {0}".format(deb_pkgs)
                ret_code, _, err = ssh.exec_command_sudo(cmd_i, timeout=90)
                if int(ret_code) != 0:
                    raise RuntimeError('Failed to install VPP on host {0}:'
                                       '\n{1}'.format(node['host'], err))
                else:
                    ssh.exec_command_sudo("dpkg -l | grep vpp")
                    logger.info("VPP installed on node {0}".format(
                        node['host']))
            ssh.disconnect(node)
def log_msg_bold(str_test=""):
    """Write `str_test` to the log in bold italics (HTML) and echo the plain
    text to the console."""
    logger.info('<b><i>' + str_test + '</i></b>', html=True)
    logger.console(str_test)
def create_funds_account_GR(self, userRole=None, bankcardNo=None, mobile=None, password="******"):
    """
    Open a personal (GR) funds custody account.

    Fetches the account list, initialises the add-account flow, submits the
    person-add form to the lanmaoly gateway, validates the bank card,
    requests an SMS code and submits the registration.
    :return: response dict from the person-add init call
    """
    # GET http://kappa-mp-test.ronaldinho.svc.cluster.local/cust/platform/accout/add/init?_ukey=5381&r=0.2863182303018188&custId=31&userRole=GUARANTEECORP HTTP/1.1
    ret = self._cust_platform_accout_getAccountList()
    # Take custId and real name from the first account entry.
    custId = ret['data'][0]['custId']
    realName = ret['data'][0]['name']
    ret = self._cust_platform_accout_add_init(custId, userRole=userRole)
    idCardNo = ret['data']['idCardNo']
    authList = [x['value'] for x in ret['data']['authList']]
    ret = self._custInfo_platform_person_add(realName, idCardNo, authList, custId, userRole=userRole)
    lanmaoly_param = ret['data']['data'].copy()
    del lanmaoly_param['reqDataObj']
    lanmaoly_url = ret['data']['url']
    j = json.loads(ret['data']['data']['reqData'])
    credType = j['idCardType']
    headers = self.session.headers.copy()
    headers['Content-Type'] = "application/x-www-form-urlencoded"
    headers['Host'] = "hubk.lanmaoly.com"
    headers['Accept'] = "application/json, text/javascript, */*; q=0.01"
    resp = self.session.post(lanmaoly_url, data=lanmaoly_param, headers=headers)
    assert resp.status_code == 200
    # The gateway redirects; the requestKey rides in the final URL's query.
    query = parse.parse_qs(parse.urlparse(resp.url)[4])
    requestKey = query['requestKey'][0]
    # Generate fake card/phone data when the caller did not supply any.
    if bankcardNo == None:
        bankcardNo = self._faker.credit_card_number()
    if mobile == None:
        mobile = self._faker.phone_number()
    # Validate the bank card (original comment: "check bank card").
    load = {
        "bankcardNo": bankcardNo,
        "requestKey": requestKey,
        "serviceType": "BANKCARD_AUTH",
    }
    resp = self.session.post(parse.urljoin(
        resp.url, "/bha-neo-app/gateway/bankcard/bin"),
        data=load, headers=headers)
    assert resp.status_code == 200
    if json.loads(resp.text)['success'] != True:
        raise AssertionError("%s, 卡号:%s" % (json.loads(resp.text)['msg'], bankcardNo))
    logger.info(json.dumps(json.loads(resp.text)))
    # Request the SMS verification code.
    load = {
        "bizType": "REGISTER",
        "mobile": mobile,
        "requestKey": requestKey,
    }
    resp = self.session.post(parse.urljoin(
        resp.url, "/bha-neo-app/gateway/sms/smsForEnterprise"),
        data=load, headers=headers)
    assert resp.status_code == 200
    assert json.loads(resp.text)['status'] == "SUCCESS"
    logger.info(json.loads(resp.text)['message'])
    # Submit the registration (fixed test SMS code).
    load = {
        "serviceType": "BANKCARD_AUTH",
        "realName": realName,
        "credType": credType,
        "idCardNo": idCardNo,
        "maskedCredNum": "",
        "bankcardNo": bankcardNo,
        "mobile": mobile,
        "smsCode": 150315,
        "password": password,
        "confirmPassword": password,
        # "protocolCheckBox" : "false",
        "requestKey": requestKey,
    }
    headers_html = headers.copy()
    headers_html[
        'Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
    resp = self.session.post(parse.urljoin(
        resp.url, "/bha-neo-app/gateway/mobile/personalRegisterExpand/register"),
        data=load, headers=headers_html)
    print(resp.text)
    assert resp.status_code == 200
    return ret
def delete_all_sessions(self):
    """ Removes all the session objects from the session cache. """
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def log_msg(str_text=""):
    """Write `str_text` both to the Robot Framework log (INFO level) and to
    the console."""
    logger.info(str_text)
    logger.console(str_text)
def received(self, context): self._received = context.reply if self.log: logger.info('Received:\n%s' % self.last_received(self.prettyxml))
def create_funds_account_QY(self, userRole="BORROWERS", mobile=None): """ 企业开通资金存管账户 userRole:GUARANTEECORP or BORROWERS :return: """ ret = self._cust_platform_accout_getAccountList() #获取custId custId = ret['data'][0]['custId'] ret = self._cust_platform_accout_add_init(custId, userRole=userRole) unifiedCode = ret['data']['unifiedCode'] authList = [x['value'] for x in ret['data']['authList']] ret = self._custInfo_platform_company_add(mobile, unifiedCode, authList, custId, userRole=userRole) lanmaoly_param = ret['data']['data'].copy() del lanmaoly_param['reqDataObj'] lanmaoly_url = ret['data']['url'] headers = self.session.headers.copy() headers['Content-Type'] = "application/x-www-form-urlencoded" headers['Host'] = "hubk.lanmaoly.com" headers['Accept'] = "application/json, text/javascript, */*; q=0.01" resp = self.session.post(lanmaoly_url, data=lanmaoly_param, headers=headers) assert resp.status_code == 200 query = parse.parse_qs(parse.urlparse(resp.url)[4]) requestKey = query['requestKey'][0] # 获取验证码 load = { "bizType": "REGISTER", "requestKey": requestKey, } resp = self.session.post(parse.urljoin(resp.url, "/bha-neo-app/gateway/sms/sms"), data=load, headers=headers) assert resp.status_code == 200 assert json.loads(resp.text)['status'] == "SUCCESS" logger.info(json.loads(resp.text)['message']) # 提交注册 load = { "smsCode": 150315, "requestKey": requestKey, #"protocolCheckBox":'false', "password": 150315, "confirmPassword": 150315, "bankCode": 'FJIB' } headers_html = headers.copy() headers_html[ 'Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8" resp = self.session.post(parse.urljoin( resp.url, "/bha-neo-app/gateway/mobile/enterpriseRegister/registerEnterprise" ), data=load, headers=headers_html) assert resp.status_code == 200 return ret
def restore_ecu(self, master_ip, ecu_backup, site_index='', offset='', session_index='', timeout=45):
    """ Restore ECU

    Restores ECU from a local backup file.

    Variable
    *master_ip*
    - master ecu ip
    - when restore master ecu backup to a blank ecu, use the new ecu ip as master_ip
    *ecu_backup*
    - location of the ecu backup file to restore
    - if the master ecu ip changed after backup slave ecu (which was part of site)
    - the master ECU will be continuously telling every ECU who the master is
    - it is expected that there might be a short window of time where the master ip on a slave ecu is wrong.
    *site_index*
    - reference to the site id index generated by reading the ECU databases
    *offset*
    - ECU offset
    *session_index*
    - optional input, will use the most recently returned session id if not specified.
    *timeout*
    - seconds to poll the ECU for completion of its reboot.

    For more information, visit `/restore-ecu`_.
    .. _/restore-ecu: http://wiki:8090/pages/viewpage.action?pageId=4849856#DataWebServiceAPI-/api/restore-ecu
    """
    assert int(timeout), 'Invalid timeout input.'
    _timeout = int(timeout)
    assert os.path.exists(ecu_backup), \
        'Unable to find file {0}'.format(ecu_backup)
    if site_index:
        assert site_index in siteinfo.site_ids, (
            'Invalid site {0} '
            'Please select from the following sites {1}'.format(
                site_index, siteinfo.site_ids.keys()))
    if offset:
        assert int(offset), 'Invalid offset parameter'
    # BUGFIX: compare empty strings with `==`, not `is` — identity of string
    # literals is interpreter-dependent.
    if site_index == '' and offset == '':
        # ECU is out of site: restore by master ip only.
        with open(ecu_backup, 'rb') as backup:
            self._assert_json_response_stop_on_error(
                self._post('restore-ecu/{0}'.format(master_ip),
                           backup, session_index=session_index))
        logger.info(self.last_url)
    else:
        _offset = int(offset)
        with open(ecu_backup, 'rb') as backup:
            # BUGFIX: pass the backup file to _post (as in the branch above);
            # previously it was handed to the assert helper and never uploaded.
            self._assert_json_response_stop_on_error(
                self._post('restore-ecu/{0}/{1}/{2}'.format(
                    siteinfo.site_ids[site_index], _offset, master_ip),
                    backup, session_index=session_index))
        logger.info(self.last_url)
    logger.info('Sleep 120 seconds while restoring', also_console=True)
    time.sleep(120)
    logger.info('Wait while ecu rebooting during restore call', also_console=True)
    # BUGFIX: poll over the validated integer timeout, not the raw argument,
    # which may be a string when supplied from a Robot test.
    for _ in range(_timeout):
        try:
            self._get_about()
            logger.info('Rebooting finished.')
            return  # ECU answered: restore/reboot cycle is complete.
        except AssertionError:
            logger.info('Still rebooting..')
        # python internal http request time out 15s to 20s
    raise AssertionError(
        'Restore and reboot took too long, ECU is still not responding')
def zap_run_standalone_script(self, script_name): zap_script_run_status = self.zap.script.run_stand_alone_script(script_name) logger.info(zap_script_run_status)
def load_google_contacts(self):
    """Load Google user attributes from google_userInfo.csv.

    Each CSV row becomes a Robot dictionary of 'key=value' entries exposed
    as suite variables ${g_user01}, ${g_user02}, ...
    """
    # Make all users visible to robot
    global OS_CONFIG_PATH
    filename = ""
    is_mt = BuiltIn().get_variable_value('${is_runtype_mt}')
    # NOTE(review): both branches resolve to the same file — the MT/ST split
    # is a no-op here (load_teamios_users uses two distinct files); confirm.
    if is_mt == "true":
        filename = OS_CONFIG_PATH + self._robot_runmodule_dir + '\\google_userInfo.csv'
    else:
        filename = OS_CONFIG_PATH + self._robot_runmodule_dir + '\\google_userInfo.csv'
    logger.info("Loading google users from config file %s " % filename)
    with open(filename) as f_in:
        lines = (line.rstrip() for line in f_in)
        lines = list(line for line in lines if line)  # Non-blank lines in a list
    # Count data rows; starting at -1 excludes the CSV header row.
    numPhones = -1
    for line in lines:
        numPhones += 1
    print numPhones
    # Column-oriented view of the CSV: column name -> list of values per row.
    reader = csv.DictReader(open(filename))
    userDict = {}
    for row in reader:
        for column, value in row.iteritems():
            userDict.setdefault(column, []).append(value)
    for i in range(0, int(numPhones)):
        userNum = i
        # NOTE(review): user_name is built with a 'first_name=' prefix and is
        # never passed to create_dictionary below — looks like dead/buggy code.
        user_name = 'first_name=%s' % userDict["user_name"][userNum]
        user_type = 'user_type=%s' % userDict["user_type"][userNum]
        first_name = 'first_name=%s' % userDict["first_name"][userNum]
        middle_name = 'middle_name=%s' % userDict["middle_name"][userNum]
        last_name = 'last_name=%s' % userDict["last_name"][userNum]
        client_email = 'client_email=%s' % userDict["client_email"][userNum]
        # NOTE(review): this literal appears redacted ('******'); applying '%'
        # to it raises TypeError at runtime — restore the original format string.
        client_password = '******' % userDict["client_password"][userNum]
        # server = 'server=%s' % userDict["server"][userNum]
        home = 'home=%s' % userDict["home"][userNum]
        work = 'work=%s' % userDict["work"][userNum]
        work_fax = 'work_fax=%s' % userDict["work_fax"][userNum]
        mobile = 'mobile=%s' % userDict["mobile"][userNum]
        pager = 'pager=%s' % userDict["pager"][userNum]
        tenant_id = 'tenant_id=%s' % userDict["tenant_id"][userNum]
        robot_address = 'robot_address=%s' % userDict["robot_address"][userNum]
        telnet_id = 'telnet_id=%s' % userDict["telnet_id"][userNum]
        company = 'company=%s' % userDict["company"][userNum]
        g_user_factory = BuiltIn().create_dictionary(user_type, first_name,
                                                     middle_name, last_name,
                                                     home, work, work_fax,
                                                     mobile, pager,
                                                     client_password,
                                                     client_email, tenant_id,
                                                     robot_address, telnet_id,
                                                     company)
        print "gUSER %s" % g_user_factory
        # Zero-pad single-digit user numbers: ${g_user01}..${g_user09}.
        phoneNum = userNum + 1
        if phoneNum < 10:
            varname = '${g_user0%s}' % phoneNum
        else:
            varname = '${g_user%s}' % phoneNum
        BuiltIn().set_suite_variable(varname, g_user_factory)
def sending(self, context): self._sent = context.envelope self._received = None if self.log: logger.info('Sending:\n%s' % self.last_sent(self.prettyxml))
def connect_to_database(self, driverName=None, dbName=None, username=None, password=None, host='localhost', port="5432", alias=None): """ Connects to database. *Arguments:* - driverName: string, name of python database driver. - dbName: string, name of database. - username: string, name of user. - password: string, user password. - host: string, database host. - port: int, database port. - alias: string, database alias for future use. *Return:* - None *Examples:* | Connect To Database | psycopg2 | PyDB | username | password \ | localhost | 5432 | SomeCompanyDB | """ if isinstance(driverName, basestring): dbModule = __import__(driverName) else: dbModule = driverName driverName = 'Mock DB Driver' connParams = { 'database': dbName, 'user': username, 'password': password, 'host': host, 'port': port } if driverName in ("MySQLdb", "pymysql"): connParams = { 'db': dbName, 'user': username, 'passwd': password, 'host': host, 'port': port } elif driverName in ("psycopg2"): connParams = { 'database': dbName, 'user': username, 'password': password, 'host': host, 'port': port } connStr = ['%s: %s' % (k, str(connParams[k])) for k in connParams] logger.debug('Connect using: %s' % ', '.join(connStr)) dbConnection = _Connection(driverName, dbModule.connect(**connParams)) self._connectionCache.register(dbConnection, alias) logger.info("Established connection to the %s database. " "Alias %s. Driver name: %s." % (dbName, alias, driverName))
def listen(self, topic, timeout=1, limit=1): """ Listen to a topic and return a list of message payloads received within the specified time. Requires an async Subscribe to have been called previously. `topic` topic to listen to `timeout` duration to listen `limit` the max number of payloads that will be returned. Specify 0 for no limit Examples: Listen and get a list of all messages received within 5 seconds | ${messages}= | Listen | test/test | timeout=5 | limit=0 | Listen and get 1st message received within 60 seconds | @{messages}= | Listen | test/test | timeout=60 | limit=1 | | Length should be | ${messages} | 1 | """ if not self._subscribed: logger.warn('Cannot listen when not subscribed to a topic') return [] if topic not in self._messages: logger.warn('Cannot listen when not subscribed to topic: %s' % topic) return [] # If enough messages have already been gathered, return them if limit != 0 and len(self._messages[topic]) >= limit: messages = self._messages[topic][:] # Copy the list's contents self._messages[topic] = [] return messages[-limit:] seconds = convert_time(timeout) limit = int(limit) logger.info('Listening on topic: %s' % topic) timer_start = time.time() while time.time() < timer_start + seconds: if limit == 0 or len(self._messages[topic]) < limit: # If the loop is running in the background # merely sleep here for a second or so and continue # otherwise, do the loop ourselves if self._background_mqttc: time.sleep(1) else: self._mqttc.loop() else: # workaround for client to ack the publish. Otherwise, # it seems that if client disconnects quickly, broker # will not get the ack and publish the message again on # next connect. time.sleep(1) break messages = self._messages[topic][:] # Copy the list's contents self._messages[topic] = [] return messages[-limit:] if limit != 0 else messages
def open_browser(self, host): logger.info('This is keyword from KeywordClass') url = 'http://{}.com/'.format(host) browser_management = BrowserManagementKeywords(self.ctx) browser_management.open_browser(url, 'chrome')
def load_teamios_users(self):
    """Load teamios user attributes from the MT/ST teamios userInfo CSV.

    Each CSV row becomes a Robot dictionary of 'key=value' entries exposed
    as suite variables ${teamios01}, ${teamios02}, ...
    """
    pass  # NOTE(review): dead statement, no effect — left as-is
    # Make all users visible to robot
    global OS_CONFIG_PATH
    filename = ""
    is_mt = BuiltIn().get_variable_value('${is_runtype_mt}')
    # Multi-tenant runs use the _mt_ file, single-tenant the _st_ file.
    if is_mt == "true":
        filename = OS_CONFIG_PATH + self._robot_runmodule_dir + '\\teamios_mt_userInfo.csv'
    else:
        filename = OS_CONFIG_PATH + self._robot_runmodule_dir + '\\teamios_st_userInfo.csv'
    logger.info("Loading hq users from config file %s " % filename)
    with open(filename) as f_in:
        lines = (line.rstrip() for line in f_in)
        lines = list(line for line in lines if line)  # Non-blank lines in a list
    # Count data rows; starting at -1 excludes the CSV header row.
    numPhones = -1
    for line in lines:
        numPhones += 1
    print numPhones
    # Column-oriented view of the CSV: column name -> list of values per row.
    reader = csv.DictReader(open(filename))
    userDict = {}
    for row in reader:
        for column, value in row.iteritems():
            userDict.setdefault(column, []).append(value)
    users_list = []  # NOTE(review): never used afterwards
    for i in range(0, int(numPhones)):
        userNum = i
        # NOTE(review): user_name is built with a 'first_name=' prefix and is
        # never passed to create_dictionary below — looks like dead/buggy code.
        user_name = 'first_name=%s' % userDict["user_name"][userNum]
        user_type = 'user_type=%s' % userDict["user_type"][userNum]
        first_name = 'first_name=%s' % userDict["first_name"][userNum]
        middle_name = 'middle_name=%s' % userDict["middle_name"][userNum]
        last_name = 'last_name=%s' % userDict["last_name"][userNum]
        extension = 'extension=%s' % userDict["extension"][userNum]
        client_id = 'client_id=%s' % userDict["client_id"][userNum]
        client_email = 'client_email=%s' % userDict["client_email"][userNum]
        # NOTE(review): the '******' literals below appear redacted; applying
        # '%' to them raises TypeError at runtime — restore the original
        # format strings before use.
        client_password = '******' % userDict["client_password"][userNum]
        ip = 'ip=%s' % userDict["ip"][userNum]
        mac = 'mac=%s' % userDict["mac"][userNum]
        phone_type = 'phone_type=%s' % userDict["phone_model"][userNum]
        server = 'server=%s' % userDict["server"][userNum]
        home = 'home=%s' % userDict["home"][userNum]
        work = 'work=%s' % userDict["work"][userNum]
        fax = 'fax=%s' % userDict["fax"][userNum]
        mobile = 'mobile=%s' % userDict["mobile"][userNum]
        pager = 'pager=%s' % userDict["pager"][userNum]
        sip_did = 'sip_did=%s' % userDict["sip_trunk_did"][userNum]
        pri_dnis = 'pri_dnis=%s' % userDict["pri_trunk_dnis"][userNum]
        vm_password = '******' % userDict["vm_password"][userNum]
        sip_password = '******' % userDict["sip_password"][userNum]
        tenant_id = 'tenant_id=%s' % userDict["tenant_id"][userNum]
        robot_address = 'robot_address=%s' % userDict["robot_address"][userNum]
        telnet_id = 'telnet_id=%s' % userDict["telnet_id"][userNum]
        company = 'company=%s' % userDict["company"][userNum]
        cas_session_id = 'cas_session_id=%s' % userDict["cas_session_id"][userNum]
        hq_username = '******' % userDict["hq_username"][userNum]
        hq_password = '******' % userDict["hq_password"][userNum]
        user_factory = BuiltIn().create_dictionary(ip, extension, server,
                                                   phone_type, user_type, mac,
                                                   first_name, middle_name,
                                                   last_name, home, work, fax,
                                                   mobile, pager, sip_did,
                                                   pri_dnis, client_password,
                                                   vm_password, sip_password,
                                                   client_id, client_email,
                                                   tenant_id, robot_address,
                                                   telnet_id, company,
                                                   cas_session_id, hq_username,
                                                   hq_password)
        print "teamios USER %s" % user_factory
        # if userNum > 1:
        #     break
        # TODO varname only allows ten users. Increase user num
        phoneNum = userNum + 1
        varname = '${teamios0%s}' % phoneNum
        logger.info("Creating teamios user dict \"%s\"" % varname)
        BuiltIn().set_suite_variable(varname, user_factory)
    logger.info("teamios user config loaded!")
def _info(self, message): if self._log_level in self.LOG_LEVEL_INFO: logger.info(message)
def get_browser_desired_capabilities(self): logger.info('Getting currently open browser desired capabilities') return self.driver.desired_capabilities
def adb_push(self, local_path=None, dest_path=None): ret_str = self._p11_utils.upload_file(local_path, dest_path) if ret_str is not None: logger.error("Upload file to terminal device failed: %s" % ret_str, html=True) return -1 logger.info("adb push %s to terminal device successfully." % local_path, html=True, also_console=True)
def _html(self, message): if self._log_level in self.LOG_LEVEL_INFO: logger.info(message, True, False)
def _embed_screenshot(self, path, width): link = utils.get_link_path(path, self._log_dir) logger.info('<a href="%s"><img src="%s" width="%s"></a>' % (link, link, width), html=True)
def example_python_keyword(self): logger.info("This is Python!")
def create_local_ssh_tunnel(self, local_port, remote_host, remote_port, *args): self.client.createLocalPortForwarder(int(local_port), remote_host, int(remote_port)) logger.info("Now forwarding port %s to %s:%s ..." % (local_port, remote_host, remote_port))
def _link_screenshot(self, path): link = utils.get_link_path(path, self._log_dir) logger.info("Screenshot saved to '<a href=\"%s\">%s</a>'." % (link, path), html=True)
def _embed_video(self, path, width): link = get_link_path(path, self._log_dir) logger.info( '<a href="%s"><video width="%s" autoplay><source src="%s" type="video/webm"></video></a>' % (link, width, link), html=True)
def create_ntlm_session( self, alias, url, auth, headers={}, cookies=None, timeout=None, proxies=None, verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0): """ Create Session: create a HTTP session to a server ``url`` Base url of the server ``alias`` Robot Framework alias to identify the session ``headers`` Dictionary of default headers ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication ``timeout`` Connection timeout ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False. ``debug`` Enable http verbosity option more information https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel ``max_retries`` The maximum number of retries each connection should attempt. ``backoff_factor`` The pause between for each retry ``disable_warnings`` Disable requests warning useful when you have large number of testcases """ if not HttpNtlmAuth: raise AssertionError('Requests NTLM module not loaded') elif len(auth) != 3: raise AssertionError('Incorrect number of authentication arguments' ' - expected 3, got {}'.format(len(auth))) else: ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]), auth[2]) logger.info('Creating NTLM Session using : alias=%s, url=%s, \ headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \ proxies=%s, verify=%s, debug=%s ' % (alias, url, headers, cookies, ntlm_auth, timeout, proxies, verify, debug)) return self._create_session( alias, url, headers, cookies, ntlm_auth, timeout, max_retries, backoff_factor, proxies, verify, debug, disable_warnings)