def _nothing_in_flight():
    """Checks if there is no ajax in flight and also logs current status
    """
    prev_log_msg = _thread_local.ajax_log_msg

    try:
        running = in_flight()
    except Exception as e:
        # if jQuery in error message, a non-cfme page (proxy error) is displayed
        # should be handled by something else
        if "jquery" not in str(e).lower():
            raise
        return True
    anything_in_flight = False
    anything_in_flight |= running["jquery"] > 0
    anything_in_flight |= running["prototype"] > 0
    anything_in_flight |= running["spinner"]
    anything_in_flight |= running["document"] != "complete"
    log_msg = ', '.join(["{}: {}".format(k, str(v)) for k, v in running.iteritems()])
    # Log the message only if it's different from the last one
    if prev_log_msg != log_msg:
        _thread_local.ajax_log_msg = log_msg
        logger.trace('Ajax running: {}'.format(log_msg))
    if (not anything_in_flight) and prev_log_msg:
        logger.trace('Ajax done')

    return not anything_in_flight

def get_transport(self, *args, **kwargs):
    if self.connected:
        logger.trace('reusing ssh transport')
    else:
        logger.trace('connecting new ssh transport')
        self.connect()
    return super(SSHClient, self).get_transport(*args, **kwargs)

def get_search_filter_spec(self, *args, **kwargs):
    # A datastore traversal spec is missing from this method in psphere.
    # psav has opened a PR to add it, but until it gets merged we'll need to come behind
    # psphere and add it in just like his PR does
    # https://github.com/jkinred/psphere/pull/18/files
    pfs = super(_PsphereClient, self).get_search_filter_spec(*args, **kwargs)
    select_sets = pfs.objectSet[0].selectSet
    missing_ss = 'datacenter_datastore_traversal_spec'
    ss_names = [ss.name for ss in select_sets]
    if missing_ss not in ss_names:
        logger.trace('Injecting %s into psphere search filter spec', missing_ss)
        # pull the folder traversal spec out of the traversal specs
        fts_ts = pfs.objectSet[0].selectSet[0]
        # and get the select set from that traversal spec
        fts_ss = fts_ts.selectSet[0]
        # add the datastore selection spec to the folder traversal spec
        dsss = self.create('SelectionSpec', name=missing_ss)
        fts_ts.selectSet.append(dsss)
        # add the datastore traversal spec to the search filter object set select spec
        dsts = self.create('TraversalSpec')
        dsts.name = 'datacenter_datastore_traversal_spec'
        dsts.type = 'Datacenter'
        dsts.path = 'datastoreFolder'
        dsts.selectSet = [fts_ss]
        select_sets.append(dsts)
    else:
        logger.warning('%s already in psphere search filter spec, not adding it', missing_ss)
    return pfs

def detect_observed_field(loc):
    """Detect observed fields; sleep if needed

    Used after filling most form fields, this function will inspect the filled field for
    one of the known CFME observed field attributes, and if found, sleep long enough for the
    observed field's AJAX request to go out, and then block until no AJAX requests are in
    flight.

    Observed fields occasionally declare their own wait interval before firing their AJAX
    request. If found, that interval will be used instead of the default.

    """
    try:
        if is_displayed(loc):
            el = element(loc)
        else:
            # Element not visible, short out
            return
    except StaleElementReferenceException:
        return

    # Default wait period, based on the default UI wait (700ms)
    # plus a little padding to let the AJAX fire before we wait_for_ajax
    default_wait = .8
    # Known observed field attributes
    observed_field_markers = (
        'data-miq_observe',
        'data-miq_observe_date',
        'data-miq_observe_checkbox',
    )
    for attr in observed_field_markers:
        try:
            observed_field_attr = el.get_attribute(attr)
            break
        except NoSuchAttributeException:
            pass
    else:
        # Failed to detect an observed text field, short out
        return

    try:
        attr_dict = json.loads(observed_field_attr)
        interval = float(attr_dict.get('interval', default_wait))
        # Pad the detected interval, as with default_wait
        interval += .1
    except (TypeError, ValueError):
        # ValueError and TypeError happen if the attribute value couldn't be decoded as JSON
        # ValueError also happens if interval couldn't be coerced to float
        # In either case, we've detected an observed text field and should wait
        interval = default_wait

    logger.trace(' Observed field detected, pausing %.1f seconds' % interval)
    sleep(interval)
    wait_for_ajax()

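# Usage sketch (an assumption, not code from the original module): detect_observed_field
# is meant to be called right after a form field is filled, so the observed field's AJAX
# request has time to fire before the next interaction. ``set_text`` below is a
# hypothetical fill helper standing in for whatever the framework actually uses.
def _example_fill_observed_field(loc, value, set_text):
    """Fill ``loc`` with ``value`` via the hypothetical ``set_text`` helper, then wait."""
    set_text(loc, value)
    # Let any data-miq_observe AJAX request fire and settle before the caller moves on
    detect_observed_field(loc)
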
def _nothing_in_flight():
    """Checks if there is no ajax in flight and also logs current status
    """
    prev_log_msg = _thread_local.ajax_log_msg

    # 5.5.z and 5.7.0.4+
    if not store.current_appliance.is_miqqe_patch_candidate:
        try:
            anything_in_flight = in_flight("return ManageIQ.qe.anythingInFlight()")
        except Exception as e:
            # if jQuery in error message, a non-cfme page (proxy error) is displayed
            # should be handled by something else
            if "jquery" not in str(e).lower():
                raise
            return True
        running = execute_script("return ManageIQ.qe.inFlight()")
        log_msg = ', '.join(["{}: {}".format(k, str(v)) for k, v in running.iteritems()])
    # 5.6.z, 5.7.0.{1,2,3}
    else:
        try:
            running = in_flight(js.in_flight)
        except Exception as e:
            # if jQuery in error message, a non-cfme page (proxy error) is displayed
            # should be handled by something else
            if "jquery" not in str(e).lower():
                raise
            return True
        anything_in_flight = False
        anything_in_flight |= running["jquery"] > 0
        anything_in_flight |= running["prototype"] > 0
        anything_in_flight |= running["spinner"]
        anything_in_flight |= running["document"] != "complete"
        anything_in_flight |= running["autofocus"] > 0
        anything_in_flight |= running["debounce"] > 0
        anything_in_flight |= running["miqQE"] > 0
        log_msg = ', '.join(["{}: {}".format(k, str(v)) for k, v in running.iteritems()])

    # Log the message only if it's different from the last one
    if prev_log_msg != log_msg:
        _thread_local.ajax_log_msg = log_msg
        logger.trace('Ajax running: %s', log_msg)
    if (not anything_in_flight) and prev_log_msg:
        logger.trace('Ajax done')

    return not anything_in_flight

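# Sketch of how the predicate above is typically consumed (an assumption about the
# surrounding module, not copied from it): an "ajax wait" helper polls _nothing_in_flight()
# until it returns True or a timeout expires, using the generic wait_for() helper shown
# later in this file. The timeout and delay values here are illustrative only.
def _example_wait_for_ajax(timeout=30):
    """Block until no AJAX requests are in flight, or ``timeout`` seconds pass."""
    wait_for(
        _nothing_in_flight,
        num_sec=timeout,
        delay=0.3,
        message="wait for ajax",
        quiet=True)
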
def pytest_pycollect_makeitem(collector, name, obj):
    """pytest hook that adds docstring metadata (if found) to a test's meta mark"""
    if not isinstance(obj, FunctionType) and not hasattr(obj, 'meta'):
        # This relies on the meta mark having already been applied to
        # all test functions before this hook is called
        return

    # __doc__ can be empty or nonexistent, make sure it's an empty string in that case
    metadata = get_meta(obj)

    if not hasattr(obj.meta, 'kwargs'):
        obj.meta.kwargs = dict()
    obj.meta.kwargs.update({'from_docs': metadata})
    if metadata:
        test_path = get_rel_path(collector.fspath)
        logger.debug('Parsed docstring metadata on {} in {}'.format(name, test_path))
        logger.trace('{} doc metadata: {}'.format(name, str(metadata)))

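# Sketch (assumption, not from the original module): once the hook above has run, the
# parsed docstring metadata sits in the meta mark's kwargs under 'from_docs', so later
# hooks can read it back from the collected item's underlying function. The exact
# docstring format understood by get_meta() is not shown here.
def pytest_collection_modifyitems(session, config, items):
    for item in items:
        meta = getattr(item.function, 'meta', None)
        if meta is None or not getattr(meta, 'kwargs', None):
            continue
        from_docs = meta.kwargs.get('from_docs', {})
        if from_docs:
            logger.trace('{} doc metadata: {}'.format(item.name, from_docs))
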
def globaltrace(frame, why, arg):
    if frame.f_code.co_filename.endswith("tracer.py"):
        return globaltrace
    if why == "line":
        # line execution event
        filename = frame.f_code.co_filename
        lineno = frame.f_lineno - 1
        try:
            padding = " " * (len(str(len(file_store[filename]))) - len(str(lineno)))
            line = file_store[filename][lineno].strip("\n")
        except IndexError:
            line = ""
        if len(frames) <= scope:
            logger.trace("{}:{}{} {}".format(len(frames), frame.f_lineno, padding, line))
    if why == "call":
        frames.append(frame)
        if len(frames) <= scope:
            s = "-" * len(frames)
            c = ">" * len(frames)
            logger.trace("{}{} call".format(s, c))
    if why == "return":
        if len(frames) <= scope:
            s = "-" * len(frames)
            c = "<" * len(frames)
            logger.trace("{}{} return".format(s, c))
        frames.pop()
    return globaltrace

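# Sketch of wiring the trace function up (an assumption about how the surrounding tracer
# module is used): sys.settrace() installs globaltrace for the current thread, so every
# line/call/return event is logged at trace level until it is uninstalled. ``frames``,
# ``file_store`` and ``scope`` are module-level state that globaltrace above already
# assumes; file_store is expected to be pre-populated with source lines elsewhere.
import sys
from contextlib import contextmanager


@contextmanager
def _example_traced(depth=1):
    """Trace everything executed inside the ``with`` block, ``depth`` call levels deep."""
    global scope
    scope = depth
    sys.settrace(globaltrace)
    try:
        yield
    finally:
        sys.settrace(None)
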
def wait_for(func, func_args=[], func_kwargs={}, **kwargs):
    """Waits for a certain amount of time for an action to complete

    Designed to wait for a certain length of time, either linearly in 1 second steps,
    or exponentially, up to a maximum. Returns the output from the function once it
    completes successfully, along with the time taken to complete the command.

    Note: If using the expo keyword, the returned elapsed time will be inaccurate as
        wait_for does not know the exact time that the function returned correctly,
        only that it returned correctly at last check.

    Args:
        func: A function to be run
        func_args: A list of function arguments to be passed to func
        func_kwargs: A dict of function keyword arguments to be passed to func
        num_sec: An int describing the number of seconds to wait before timing out.
        timeout: Either an int describing the number of seconds to wait before timing out.
            Or a :py:class:`timedelta` object. Or a string formatted like ``1h 10m 5s``.
            This then sets the ``num_sec`` variable.
        expo: A boolean flag toggling exponential delay growth.
        message: A string containing a description of func's operation. If None,
            defaults to the function's name.
        fail_condition: An object describing the failure condition that should be tested
            against the output of func. If func() == fail_condition, wait_for continues
            to wait. Can be a callable which takes the result and returns a boolean
            whether to fail. You can also specify it as a set, in which case it checks
            whether the result is present in that set.
        handle_exception: A boolean controlling the handling of exceptions during func()
            invocation. If set to True, in cases where func() results in an exception,
            clobber the exception and treat it as a fail_condition.
        delay: An integer describing the number of seconds to delay before trying func()
            again.
        fail_func: A function to be run after every unsuccessful attempt to run func()
        quiet: Do not write time report to the log (default False)
        silent_failure: Even if the entire attempt times out, don't throw an exception.

    Returns:
        A tuple containing the output from func() and a float detailing the total wait time.

    Raises:
        TimedOutError: If num_sec is exceeded after an unsuccessful func() invocation.

    """
    st_time = time.time()
    total_time = 0
    if "timeout" in kwargs and kwargs["timeout"] is not None:
        timeout = kwargs["timeout"]
        if isinstance(timeout, (int, float)):
            num_sec = float(timeout)
        elif isinstance(timeout, basestring):
            num_sec = _parse_time(timeout)
        elif isinstance(timeout, timedelta):
            num_sec = timeout.total_seconds()
        else:
            raise ValueError("Timeout got an unknown value {}".format(timeout))
    else:
        num_sec = kwargs.get('num_sec', 120)

    expo = kwargs.get('expo', False)
    message = kwargs.get('message', None)

    if isinstance(func, partial):
        line_no = "<partial>"
        filename = "<partial>"
        if not message:
            params = ", ".join([str(arg) for arg in func.args])
            message = "partial function %s(%s)" % (func.func.func_name, params)
    else:
        line_no = func.func_code.co_firstlineno
        filename = func.func_code.co_filename
        if not message:
            message = "function %s()" % func.func_name

    fail_condition = kwargs.get('fail_condition', False)
    if callable(fail_condition):
        fail_condition_check = fail_condition
    elif isinstance(fail_condition, set):
        fail_condition_check = lambda result: result in fail_condition
    else:
        fail_condition_check = lambda result: result == fail_condition
    handle_exception = kwargs.get('handle_exception', False)
    delay = kwargs.get('delay', 1)
    fail_func = kwargs.get('fail_func', None)
    quiet = kwargs.get("quiet", False)
    silent_fail = kwargs.get("silent_failure", False)

    t_delta = 0
    tries = 0
    logger.trace('Started {} at {}'.format(message, st_time))
    while t_delta <= num_sec:
        try:
            tries += 1
            out = func(*func_args, **func_kwargs)
        except:
            if handle_exception:
                out = fail_condition
            else:
                logger.info("Wait for {} took {} tries and {} seconds before failure.".format(
                    message, tries, time.time() - st_time))
                raise
        if out is fail_condition or fail_condition_check(out):
            time.sleep(delay)
            total_time += delay
            if expo:
                delay *= 2
            if fail_func:
                fail_func()
        else:
            duration = time.time() - st_time
            if not quiet:
                logger.trace('Took {:0.2f} to do {}'.format(duration, message))
            logger.trace('Finished {} at {}, {} tries'.format(message, st_time + t_delta, tries))
            return WaitForResult(out, duration)
        t_delta = time.time() - st_time
    logger.trace('Finished at {}'.format(st_time + t_delta))
    if not silent_fail:
        logger.error("Couldn't complete {} at {}:{} in time, took {:0.2f}, {} tries".format(
            message, filename, line_no, t_delta, tries))
        logger.error('The last result of the call was: {}'.format(str(out)))
        raise TimedOutError("Could not do {} at {}:{} in time".format(message, filename, line_no))
    else:
        logger.warning("Could not do {} at {}:{} in time ({} tries) but ignoring".format(
            message, filename, line_no, tries))
        logger.warning('The last result of the call was: {}'.format(str(out)))
