def setUpClass(cls):
    """Set up fixture-level logging and metrics before any tests run."""
    super(BaseTestFixture, cls).setUpClass()
    # Master Config Provider
    # Set up the root log handler only if the root logger doesn't
    # already have one (idiomatic truthiness check instead of '== []')
    if not cclogging.getLogger('').handlers:
        cclogging.getLogger('').addHandler(
            cclogging.setup_new_cchandler('cc.master'))

    # Set up the fixture log, which is really just a copy of the master
    # log for the duration of this test fixture
    cls.fixture_log = cclogging.getLogger('')
    cls._fixture_log_handler = cclogging.setup_new_cchandler(
        cclogging.get_object_namespace(cls))
    cls.fixture_log.addHandler(cls._fixture_log_handler)

    # @todo: Upgrade the metrics to be more unittest compatible.
    # Currently the unittest results are not available at the fixture
    # level, only the test case or the test suite and runner level.
    # (Was a no-op triple-quoted string statement; now a real comment.)

    # Setup the fixture level metrics
    cls.fixture_metrics = TestRunMetrics()
    cls.fixture_metrics.timer.start()

    # Report
    cls.fixture_log.info("{0}".format('=' * 56))
    cls.fixture_log.info("Fixture...: {0}".format(
        str(cclogging.get_object_namespace(cls))))
    cls.fixture_log.info("Created At: {0}"
                         .format(cls.fixture_metrics.timer.start_time))
    cls.fixture_log.info("{0}".format('=' * 56))
def setUpClass(cls):
    """Initialize fixture logging and start the fixture metrics timer."""
    super(BaseTestFixture, cls).setUpClass()
    # Master Config Provider
    # Attach a root handler only when the root logger has none yet
    root_log = cclogging.getLogger('')
    if root_log.handlers == []:
        root_log.addHandler(cclogging.setup_new_cchandler('cc.master'))
    # The fixture log is simply the master log, captured separately for
    # the duration of this fixture via its own handler
    cls.fixture_log = cclogging.getLogger('')
    cls._fixture_log_handler = cclogging.setup_new_cchandler(
        cclogging.get_object_namespace(cls))
    cls.fixture_log.addHandler(cls._fixture_log_handler)
    # @todo: Upgrade the metrics to be more unittest compatible; unittest
    # results are only exposed at the case / suite / runner level today.
    cls.fixture_metrics = TestRunMetrics()
    cls.fixture_metrics.timer.start()
    # Emit the fixture banner
    banner = "{0}".format('=' * 56)
    cls.fixture_log.info(banner)
    cls.fixture_log.info("Fixture...: {0}".format(
        str(cclogging.get_object_namespace(cls))))
    cls.fixture_log.info("Created At: {0}".format(
        cls.fixture_metrics.timer.start_time))
    cls.fixture_log.info(banner)
def __init__(self):
    """Build the brew runner: parse args, set up env/logging, load suites."""
    self.print_mug()
    self.cl_args = ArgumentParser().parse_args()
    self.test_env = TestEnvManager(
        "", self.cl_args.config, test_repo_package_name="")
    # NOTE(review): self-assignment looks like a no-op; presumably it
    # forces the property to resolve its default -- confirm
    self.test_env.test_data_directory = self.test_env.test_data_directory
    self.test_env.finalize()
    cclogging.init_root_log_handler()
    # This is where things diverge from the regular parallel runner:
    # the runfile contents are extracted here
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.datagen_start = time.time()
    self.run_file = BrewFile(self.cl_args.runfiles)
    # Log the runfile up front so it precedes any test output in the logs
    self._log.debug("\n" + str(self.run_file))
    # TODO: Once the parallel_runner is changed to a yielding model,
    # change this to yielding brews instead of generating a list
    builder = SuiteBuilder(
        testrepos=self.run_file.brew_modules(),
        dry_run=self.cl_args.dry_run,
        exit_on_error=True)
    self.suites = builder.get_suites()
    self.print_configuration(self.test_env, brewfile=self.run_file)
def __init__(self, ip_address, username='******', password=None, key=None,
             connection_timeout=600, retry_interval=10):
    """Create a WinRM client after validating and reaching the host."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    # Reject addresses that are not valid IPv4/IPv6 literals
    try:
        IP(ip_address)
    except ValueError:
        raise ServerUnreachable(ip_address)
    reachable = self._is_instance_reachable(
        ip_address=ip_address, retry_interval=retry_interval,
        timeout=connection_timeout)
    if not reachable:
        raise ServerUnreachable(ip_address)
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.client = WinRMClient(
        username=username, password=password, host=ip_address)
    # NOTE(review): the connect result is ignored here; sibling variants
    # raise on a failed connection -- confirm this is intended
    self.client.connect_with_retries()
def __init__(self, ip_address=None, password=None, os_distro=None,
             config=None, username=None, server_id=None):
    """Resolve a distro-specific instance client and set up logging."""
    self._client = InstanceClientFactory.get_instance_client(
        ip_address=ip_address,
        password=password,
        os_distro=os_distro,
        username=username,
        server_id=server_id,
        config=config)
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
def __init__(self, ip_address=None, server_id=None, username=None,
             password=None, config=None, os_distro=None, key=None):
    """Open an SSH connection to a server, waiting for it to be pingable.

    Raises ServerUnreachable if no IP is given or ping never succeeds
    within config.connection_timeout; raises SshConnectionException when
    SSH authentication fails.
    """
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    ssh_timeout = config.connection_timeout
    if ip_address is None:
        raise ServerUnreachable("None")
    self.ip_address = ip_address
    self.username = username
    if self.username is None:
        self.username = '******'
    self.password = password
    self.server_id = server_id
    # Poll ping until the host answers.  Break as soon as it does so we
    # neither sleep an extra interval nor raise the timeout after a
    # successful ping that arrived near the deadline (previous bug).
    start = int(time.time())
    reachable = False
    while not reachable:
        reachable = PingClient.ping(ip_address,
                                    config.ip_address_version_for_ssh)
        if reachable:
            break
        time.sleep(config.connection_retry_interval)
        if int(time.time()) - start >= config.connection_timeout:
            raise ServerUnreachable(ip_address)
    self.ssh_client = SSHBaseClient(self.ip_address, self.username,
                                    self.password, timeout=ssh_timeout,
                                    key=key)
    if not self.ssh_client.test_connection_auth():
        # Previous message embedded a literal line-continuation inside
        # the string; rebuilt as a clean format string.
        # NOTE(review): this logs the password -- confirm acceptable
        self.client_log.error(
            "Ssh connection failed for: IP:{0} Username:{1} "
            "Password: {2}".format(self.ip_address, self.username,
                                   self.password))
        raise SshConnectionException("ssh connection failed")
def deserialize(cls, serialized_str):
    """Deserialize a prettytable string into a model object.

    Returns the model object, or None when deserialization failed;
    failures are logged rather than raised.
    """
    cls._log = cclogging.getLogger(cclogging.get_object_namespace(cls))
    model_object = None
    deserialization_exception = None
    try:
        model_object = cls._prettytable_str_to_obj(serialized_str)
    except Exception as exception:
        # Bind under a separate name: on Python 3 the 'except ... as'
        # target is unbound when the handler exits, which would break
        # the 'is not None' check below.
        deserialization_exception = exception
        cls._log.exception(exception)
    # Run the model's optional post-deserialization hook
    try:
        if hasattr(model_object, '_postprocess'):
            model_object._postprocess()
    except Exception as post_deserialization_exception:
        cls._log.error("Unable to run post-deserialization process")
        cls._log.exception(post_deserialization_exception)
    if deserialization_exception is not None:
        try:
            cls._log.debug(
                "Deserialization Error: Attempted to deserialize string "
                "as a prettytable:")
            cls._log.debug("\n{0}".format(
                serialized_str.decode(encoding='UTF-8', errors='ignore')))
        except Exception as exception:
            cls._log.exception(exception)
            cls._log.warning(
                "Unable to log information regarding the deserialization "
                "exception")
    return model_object
def __init__(self, ip_address, username='******', password=None, key=None,
             connection_timeout=600, retry_interval=10):
    """Validate the address, wait for ping, then connect over WinRM."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    # The address must be a well-formed IPv4/IPv6 literal
    try:
        IP(ip_address)
    except ValueError:
        raise InvalidAddressFormat(ip_address)
    # Block until the host answers ping (or the timeout expires)
    PingClient.ping_until_reachable(
        ip_address, timeout=connection_timeout,
        interval_time=retry_interval)
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.client = WinRMClient(
        username=username, password=password, host=ip_address)
    if not self.client.connect_with_retries():
        raise WinRMConnectionException(ip_address=ip_address)
def __init__(self):
    """Parse CLI args, prepare the environment, and build the suite list."""
    self.print_mug()
    self.cl_args = ArgumentParser().parse_args()
    self.test_env = TestEnvManager("", self.cl_args.config,
                                   test_repo_package_name="")
    # NOTE(review): appears to be a no-op self-assignment; presumably
    # forces the property to resolve its default -- confirm
    self.test_env.test_data_directory = self.test_env.test_data_directory
    self.test_env.finalize()
    cclogging.init_root_log_handler()
    # Diverges from the regular parallel runner: extract runfile contents
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.datagen_start = time.time()
    self.run_file = BrewFile(self.cl_args.runfiles)
    # Emit the runfile first so it leads the log, ahead of any tests
    self._log.debug("\n" + str(self.run_file))
    # TODO: yield brews instead of building a list once parallel_runner
    # moves to a yielding model
    self.suites = SuiteBuilder(testrepos=self.run_file.brew_modules(),
                               dry_run=self.cl_args.dry_run,
                               exit_on_error=True).get_suites()
    self.print_configuration(self.test_env, brewfile=self.run_file)
def __init__(self, ip_address, username='******', password=None, key=None,
             connection_timeout=600, retry_interval=10):
    """Verify reachability of a Windows host, then connect via WinRM."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    # The address must parse as an IP literal
    try:
        IP(ip_address)
    except ValueError:
        raise ServerUnreachable(ip_address)
    if not self._is_instance_reachable(ip_address=ip_address,
                                       retry_interval=retry_interval,
                                       timeout=connection_timeout):
        raise ServerUnreachable(ip_address)
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.client = WinRMClient(username=username, password=password,
                              host=ip_address)
    if not self.client.connect_with_retries():
        raise WinRMConnectionException(ip_address=ip_address)
def __init__(self, ip_address=None, password=None, os_distro=None,
             config=None, username=None, server_id=None, key=None):
    """Look up the distro-appropriate instance client and hold onto it."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    client_kwargs = dict(ip_address=ip_address, password=password,
                         os_distro=os_distro, username=username,
                         server_id=server_id, config=config, key=key)
    self._client = InstanceClientFactory.get_instance_client(
        **client_kwargs)
def deserialize(cls, serialized_str):
    """Deserialize a prettytable string into a model object.

    Returns None on failure; errors are logged, never raised.
    """
    cls._log = cclogging.getLogger(cclogging.get_object_namespace(cls))
    model_object = None
    deserialization_exception = None
    try:
        model_object = cls._prettytable_str_to_obj(serialized_str)
    except Exception as exception:
        # Keep a reference under another name: Python 3 unbinds the
        # 'except ... as' target after the handler, which would make the
        # 'is not None' check below raise NameError.
        deserialization_exception = exception
        cls._log.exception(exception)
    # Give the model a chance to post-process itself
    try:
        if hasattr(model_object, '_postprocess'):
            model_object._postprocess()
    except Exception as post_deserialization_exception:
        cls._log.error("Unable to run post-deserialization process")
        cls._log.exception(post_deserialization_exception)
    if deserialization_exception is not None:
        try:
            cls._log.debug(
                "Deserialization Error: Attempted to deserialize string "
                "as a prettytable:")
            cls._log.debug("\n{0}".format(serialized_str.decode(
                encoding='UTF-8', errors='ignore')))
        except Exception as exception:
            cls._log.exception(exception)
            cls._log.warning(
                "Unable to log information regarding the deserialization "
                "exception")
    return model_object
def deserialize(cls, serialized_str, format_type):
    """Deserialize serialized_str via the cls._<format_type>_to_obj hook.

    Returns the model object, or None when the input is empty or the
    format-specific deserializer raised (failures are logged, not raised).
    """
    cls._log = cclogging.getLogger(
        cclogging.get_object_namespace(cls))
    model_object = None
    deserialization_exception = None
    # 'len(...) > 0' was redundant with truthiness
    if serialized_str:
        try:
            deserialize_method = '_{0}_to_obj'.format(format_type)
            model_object = getattr(cls, deserialize_method)(serialized_str)
        except Exception as exception:
            # Bind to a separate name: the 'except ... as' target is
            # unbound after the handler on Python 3, breaking the
            # 'is not None' check below.
            deserialization_exception = exception
            cls._log.exception(exception)
        # Try to log string and format_type if deserialization broke
        if deserialization_exception is not None:
            try:
                cls._log.debug(
                    "Deserialization Error: Attempted to deserialize type"
                    " using type: {0}".format(format_type.decode(
                        encoding='UTF-8', errors='ignore')))
                # 'Unble' typo fixed in the message below
                cls._log.debug(
                    "Deserialization Error: Unable to deserialize the "
                    "following:\n{0}".format(serialized_str.decode(
                        encoding='UTF-8', errors='ignore')))
            except Exception as exception:
                cls._log.exception(exception)
                cls._log.debug(
                    "Unable to log information regarding the "
                    "deserialization exception")
    return model_object
def __init__(self, ip_address, username='******', password=None, key=None,
             connection_timeout=600, retry_interval=10):
    """Check address format and liveness, then establish a WinRM session."""
    namespace = cclogging.get_object_namespace(self.__class__)
    self.client_log = cclogging.getLogger(namespace)
    # A malformed address fails fast
    try:
        IP(ip_address)
    except ValueError:
        raise InvalidAddressFormat(ip_address)
    # Wait for the host to respond to ping before trying WinRM
    PingClient.ping_until_reachable(ip_address,
                                    timeout=connection_timeout,
                                    interval_time=retry_interval)
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.client = WinRMClient(username=username, password=password,
                              host=ip_address)
    connected = self.client.connect_with_retries()
    if connected:
        return
    raise WinRMConnectionException(ip_address=ip_address)
def deserialize(cls, serialized_str, format_type):
    """Deserialize serialized_str with the cls._<format_type>_to_obj hook.

    Returns the model object, or None for empty input or a failed
    deserialization; failures are logged rather than raised.
    """
    cls._log = cclogging.getLogger(cclogging.get_object_namespace(cls))
    model_object = None
    deserialization_exception = None
    # 'len(...) > 0' was redundant with truthiness
    if serialized_str:
        try:
            deserialize_method = '_{0}_to_obj'.format(format_type)
            model_object = getattr(cls, deserialize_method)(serialized_str)
        except Exception as exception:
            # Separate name required: Python 3 unbinds the 'except ... as'
            # target after the handler, which would break the check below.
            deserialization_exception = exception
            cls._log.exception(exception)
        # Try to log string and format_type if deserialization broke
        if deserialization_exception is not None:
            try:
                cls._log.debug(
                    "Deserialization Error: Attempted to deserialize type"
                    " using type: {0}".format(
                        format_type.decode(encoding='UTF-8',
                                           errors='ignore')))
                # 'Unble' typo fixed in the message below
                cls._log.debug(
                    "Deserialization Error: Unable to deserialize the "
                    "following:\n{0}".format(
                        serialized_str.decode(encoding='UTF-8',
                                              errors='ignore')))
            except Exception as exception:
                cls._log.exception(exception)
                cls._log.debug("Unable to log information regarding the "
                               "deserialization exception")
    return model_object
def __init__(self, config_file_path, section_name):
    """Wire up env-var override and config-file sources for a section."""
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    # Environment variables take precedence over the config file
    self._override = EnvironmentVariableDataSource(section_name)
    self._data_source = ConfigParserDataSource(config_file_path,
                                               section_name)
    self._section_name = section_name
def generate_rsa_ssh_keys(cls, keyfile_name=None, keyfile_path=None,
                          key_size=1024, pass_phrase=""):
    """ Generates rsa keys

    Writes an RSA keypair to <keyfile_path>/<keyfile_name>(.pub),
    defaulting the name to a timestamped "test_ssh_key_*" and the path
    to the engine temp directory, and returns an SSHKeyResponse carrying
    either the file paths or an error.  NOTE: uses Python-2-only syntax
    ('except IOError as (errno, strerror)', octal 0700).
    """
    engine_config = EngineConfig()
    _log = cclogging.getLogger(__name__)
    _log.debug(
        "Creating RSA keys with name: {0} inside folder: {1}".format(
            keyfile_name, keyfile_path))
    # Build the key file names and path
    if keyfile_name is None:
        keyfile_name = "test_ssh_key_{0}".format(
            str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")))
    if keyfile_path is None:
        keyfile_path = engine_config.temp_directory
    pub_keyfile_path = os.path.join(keyfile_path,
                                    "{0}.pub".format(keyfile_name))
    private_key_file_path = os.path.join(keyfile_path, keyfile_name)
    # If the key files already exist, remove them
    if os.path.isfile(private_key_file_path):
        os.remove(private_key_file_path)
    if os.path.isfile(pub_keyfile_path):
        os.remove(pub_keyfile_path)
    try:
        # Generate the keys
        private_key = RSA.generate(key_size)
        public_key = private_key.publickey()
    except ValueError as msg:
        # Key generation failed (e.g. unsupported key size)
        _log.error("Key Generate exception: \n {0}".format(msg))
        return SSHKeyResponse(error=msg)
    try:
        # Create the key files and write the keys onto them
        with open(pub_keyfile_path, "w") as public_key_file:
            public_key_file.write(
                public_key.exportKey(passphrase=pass_phrase))
        if not os.path.isfile(pub_keyfile_path):
            return SSHKeyResponse(error="No public key file created")
        with open(private_key_file_path, "w") as private_key_file:
            private_key_file.write(
                private_key.exportKey(passphrase=pass_phrase))
        if not os.path.isfile(private_key_file_path):
            return SSHKeyResponse(error="No private key file created")
        else:
            # Restrict the private key to owner-only access
            os.chmod(private_key_file_path, 0700)
            return SSHKeyResponse(public_key=pub_keyfile_path,
                                  private_key=private_key_file_path)
    except IOError as (errno, strerror):
        _log.error("I/O error({0}): {1}".format(errno, strerror))
        return SSHKeyResponse(error=strerror)
def __init__(self, config_file_path, section_name):
    """Compose env-var override and config-file data sources."""
    namespace = cclogging.get_object_namespace(self.__class__)
    self._log = cclogging.getLogger(namespace)
    # Environment variables override values from the config file
    self._override = EnvironmentVariableDataSource(section_name)
    self._data_source = ConfigParserDataSource(config_file_path,
                                               section_name)
    self._section_name = section_name
def _start_logging(self, log_file_name):
    """Attach a log handler to the wrapped function and log its caller."""
    setattr(self.func, "_log_handler",
            cclogging.setup_new_cchandler(log_file_name))
    setattr(self.func, "_log", cclogging.getLogger(""))
    self.func._log.addHandler(self.func._log_handler)
    try:
        curframe = inspect.currentframe()
        self.func._log.debug("{0} called from {1}".format(
            self.__name__, inspect.getouterframes(curframe, 2)[2][3]))
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        self.func._log.debug(
            "Unable to log where {0} was called from".format(
                self.__name__))
def __init__(self, ip_address=None, username='******', password=None,
             key=None, connection_timeout=600, retry_interval=10):
    """Ping the host until reachable, then open an SSH session."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.connection_timeout = connection_timeout
    # The address must be a valid IP literal
    try:
        IP(ip_address)
    except ValueError:
        raise InvalidAddressFormat(ip_address)
    # Wait for the host to answer ping before attempting SSH
    PingClient.ping_until_reachable(
        ip_address, timeout=connection_timeout,
        interval_time=retry_interval)
    # Key auth when a key is supplied, password auth otherwise; the
    # SSH agent is only consulted when a key is in play
    auth_strategy = (SSHAuthStrategy.KEY_STRING if key is not None
                     else SSHAuthStrategy.PASSWORD)
    allow_agent = bool(key)
    self.ssh_client = SSHClient(username=self.username,
                                password=self.password,
                                host=self.ip_address,
                                tcp_timeout=20,
                                auth_strategy=auth_strategy,
                                look_for_keys=False,
                                key=key,
                                allow_agent=allow_agent)
    self.ssh_client.connect_with_timeout(cooldown=20,
                                         timeout=connection_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(
            message.format(timeout=connection_timeout,
                           ip_address=ip_address))
def _start_logging(self, log_file_name):
    """Start logging: give the wrapped function its own handler and
    record which frame called this decorator."""
    setattr(self.func, '_log_handler', cclogging.setup_new_cchandler(
        log_file_name))
    setattr(self.func, '_log', cclogging.getLogger(''))
    self.func._log.addHandler(self.func._log_handler)
    try:
        curframe = inspect.currentframe()
        self.func._log.debug("{0} called from {1}".format(
            self.__name__, inspect.getouterframes(curframe, 2)[2][3]))
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit and
        # KeyboardInterrupt propagate.
        self.func._log.debug(
            "Unable to log where {0} was called from".format(
                self.__name__))
def ping_using_remote_machine(self, ping_ip_address, count=3):
    """Ping ping_ip_address from the remote host over the SSH session.

    Returns True when packet loss was under 100%, False when the loss was
    total or the ping output could not be parsed.
    """
    _log = cclogging.getLogger(__name__)
    # packet count value of 3 comes with the constants
    base_cmd = "{0} {1}".format(SSHBehavior.PING_IPV4_COMMAND_LINUX, count)
    ping_response = self.client.execute_shell_command("{0} {1}".format(
        base_cmd, ping_ip_address), prompt="$").stdout
    match = re.search(SSHBehavior.PING_PACKET_LOSS_REGEX, ping_response)
    if match is None:
        _log.error(
            "regex did not match ping response: {0}".format(ping_response))
        return False
    return int(match.group("ping_loss")) != 100
def __init__(self, ip_address=None, server_id=None, username=None,
             password=None, config=None, os_distro=None, key=None):
    """Wait for the server to answer ping, then connect over SSH.

    Raises ServerUnreachable when no IP is supplied or ping never
    succeeds within config.connection_timeout; raises
    SshConnectionException when the SSH connection cannot be established.
    """
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    ssh_timeout = config.connection_timeout
    if ip_address is None:
        raise ServerUnreachable("None")
    self.ip_address = ip_address
    self.username = username
    if self.username is None:
        self.username = '******'
    self.password = password
    self.server_id = server_id
    # Poll ping until the host responds.  Break immediately on success so
    # we don't sleep one extra interval or raise the timeout after a
    # successful ping near the deadline (previous bug).
    start = int(time.time())
    reachable = False
    while not reachable:
        reachable = PingClient.ping(ip_address,
                                    config.ip_address_version_for_ssh)
        if reachable:
            break
        time.sleep(config.connection_retry_interval)
        if int(time.time()) - start >= config.connection_timeout:
            raise ServerUnreachable(ip_address)
    if key is not None:
        auth_strategy = SSHAuthStrategy.KEY_STRING
    else:
        auth_strategy = SSHAuthStrategy.PASSWORD
    self.ssh_client = SSHBehaviors(username=self.username,
                                   password=self.password,
                                   host=self.ip_address, tcp_timeout=20,
                                   auth_strategy=auth_strategy,
                                   look_for_keys=False, key=key)
    self.ssh_client.connect_with_timeout(cooldown=20, timeout=ssh_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(
            message.format(timeout=ssh_timeout, ip_address=ip_address))
def __init__(self, config_file_path, section_name):
    """Load a JSON config file as this data source's backing data.

    Raises NonExistentConfigPathError when the file is missing; JSON
    parse errors are logged and re-raised.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self._section_name = section_name
    # Check if file path exists
    if not os.path.exists(config_file_path):
        msg = "Could not verify the existence of config file at {0}".format(
            config_file_path)
        raise NonExistentConfigPathError(msg)
    with open(config_file_path) as config_file:
        config_data = config_file.read()
    try:
        self._data_source = json.loads(config_data)
    except Exception as exception:
        self._log.exception(exception)
        # Bare raise preserves the original traceback
        # ('raise exception' resets it on Python 2)
        raise
def __init__(self, config_file_path, section_name):
    """Read an INI-style config file into a SafeConfigParser.

    Raises NonExistentConfigPathError when the file is missing; parser
    errors are logged and re-raised.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self._data_source = ConfigParser.SafeConfigParser()
    self._section_name = section_name
    # Check if the path exists
    if not os.path.exists(config_file_path):
        msg = "Could not verify the existence of config file at {0}".format(
            config_file_path)
        raise NonExistentConfigPathError(msg)
    # Read the file in and turn it into a SafeConfigParser instance
    try:
        self._data_source.read(config_file_path)
    except Exception as exception:
        self._log.exception(exception)
        # Bare raise preserves the original traceback
        # ('raise exception' resets it on Python 2)
        raise
def ping_using_remote_machine(self, ping_ip_address, count=3):
    """Run ping against ping_ip_address from the remote machine.

    True when at least one packet came back; False on total loss or when
    the output could not be parsed.
    """
    _log = cclogging.getLogger(__name__)
    # packet count value of 3 comes with the constants
    command = "{0} {1}".format(SSHBehavior.PING_IPV4_COMMAND_LINUX, count)
    full_command = "{0} {1}".format(command, ping_ip_address)
    ping_response = self.client.execute_shell_command(
        full_command, prompt="$").stdout
    packet_loss_regex_result = re.search(
        SSHBehavior.PING_PACKET_LOSS_REGEX, ping_response)
    if packet_loss_regex_result is None:
        _log.error(
            "regex did not match ping response: {0}".format(
                ping_response))
        return False
    loss = int(packet_loss_regex_result.group("ping_loss"))
    return loss != 100
def __init__(self):
    """Collect CLI args, finalize the test environment, set up logging."""
    self.cl_args = _UnittestRunnerCLI().get_cl_args()
    self.product = self.cl_args.product
    self.test_env = TestEnvManager(
        self.cl_args.product, self.cl_args.config,
        test_repo_package_name=self.cl_args.test_repo)
    # CLI overrides of environment defaults (like the data directory)
    # must land before finalize() runs
    self.test_env.test_data_directory = (
        self.test_env.test_data_directory or self.cl_args.data_directory)
    self.test_env.finalize()
    cclogging.init_root_log_handler()
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.print_mug_and_paths(self.test_env)
class PingClient(object):
    """Pings hosts through the local system's ping/ping6 binaries."""

    _log = cclogging.getLogger(__name__)

    PING_IPV4_COMMAND_LINUX = 'ping -c 3'
    PING_IPV6_COMMAND_LINUX = 'ping6 -c 3'
    PING_IPV4_COMMAND_WINDOWS = 'ping'
    PING_IPV6_COMMAND_WINDOWS = 'ping -6'
    # Raw string: '\d' and '\%' are invalid escape sequences in a plain
    # literal (same runtime value, no DeprecationWarning on Python 3)
    PING_PACKET_LOSS_REGEX = r'(\d{1,3})\.?\d*\%.*loss'

    @classmethod
    def ping(cls, ip):
        """
        @summary: Ping a server with a IP
        @param ip: IP address to ping
        @type ip: string
        @return: True if the server was reachable, False otherwise
        @rtype: bool
        """
        address = IP(ip)
        ip_address_version = address.version()
        # Pick the platform- and version-appropriate ping command
        os_type = platform.system().lower()
        ping_ipv4 = (cls.PING_IPV4_COMMAND_WINDOWS if os_type == 'windows'
                     else cls.PING_IPV4_COMMAND_LINUX)
        ping_ipv6 = (cls.PING_IPV6_COMMAND_WINDOWS if os_type == 'windows'
                     else cls.PING_IPV6_COMMAND_LINUX)
        ping_command = ping_ipv6 if ip_address_version == 6 else ping_ipv4
        command = '{command} {address}'.format(command=ping_command,
                                               address=ip)
        cls._log.debug(
            "Executing command '{command}'".format(command=command))
        process = subprocess.Popen(command, shell=True,
                                   stdout=subprocess.PIPE)
        process.wait()
        try:
            packet_loss_percent = re.search(
                cls.PING_PACKET_LOSS_REGEX,
                process.stdout.read()).group(1)
        except Exception:
            # If there is no match, fail
            return False
        cls._log.debug(
            "Pinged {ip} with {packet_loss}% packet loss.".format(
                ip=ip, packet_loss=packet_loss_percent))
        return packet_loss_percent != '100'
def __init__(self):
    """Initialize the unittest runner from command-line arguments."""
    self.cl_args = _UnittestRunnerCLI().get_cl_args()
    self.test_env = TestEnvManager(
        self.cl_args.product, self.cl_args.config,
        test_repo_package_name=self.cl_args.test_repo)
    # Anything in cl_args that overrides a default (e.g. the data
    # directory) has to be applied before finalize() is called
    data_dir = self.test_env.test_data_directory
    self.test_env.test_data_directory = (data_dir or
                                         self.cl_args.data_directory)
    self.test_env.finalize()
    cclogging.init_root_log_handler()
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.product = self.cl_args.product
    self.print_mug_and_paths(self.test_env)
def __init__(self, config_file_path, section_name):
    """Read an INI-style config file into a SafeConfigParser.

    Raises NonExistentConfigPathError when the file is missing; parser
    errors are logged and re-raised.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self._data_source = configparser.SafeConfigParser()
    self._section_name = section_name
    # Check if the path exists
    if not os.path.exists(config_file_path):
        msg = 'Could not verify the existence of config file at {0}'\
            .format(config_file_path)
        raise NonExistentConfigPathError(msg)
    # Read the file in and turn it into a SafeConfigParser instance
    try:
        self._data_source.read(config_file_path)
    except Exception as exception:
        self._log.exception(exception)
        # Bare raise keeps the original traceback
        # ('raise exception' discards it on Python 2)
        raise
def __init__(self, config_file_path, section_name):
    """Load a JSON config file as this data source's backing data.

    Raises NonExistentConfigPathError when the file is missing; JSON
    parse errors are logged and re-raised.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self._section_name = section_name
    # Check if file path exists
    if not os.path.exists(config_file_path):
        msg = 'Could not verify the existence of config file at {0}'\
            .format(config_file_path)
        raise NonExistentConfigPathError(msg)
    with open(config_file_path) as config_file:
        config_data = config_file.read()
    try:
        self._data_source = json.loads(config_data)
    except Exception as exception:
        self._log.exception(exception)
        # Bare raise keeps the original traceback
        # ('raise exception' discards it on Python 2)
        raise
def __init__(self, use_proxy=True, proxy_os=LINUX, ip_version=4,
             logger=None, debug=False):
    """Proxy server constructor.

    @param use_proxy: (Boolean) - whether a proxy/bastion should execute
        commands or act as a hop to another address; False means commands
        run from the localhost.
    @param proxy_os: (ENUM) - hook for multi-OS support; only Linux is
        supported currently.
    @param ip_version: IP version used by default where utilities differ
        between versions.
    @param logger: logging facility; defaults to this class's namespace
        logger.
    @param debug: (Boolean) used for debugging system and mixin utilities.
    @return: None
    """
    self.use_proxy = use_proxy
    self.debug = debug
    self._proxy_svr = None
    self._proxy_ip = None
    self._proxy_os = proxy_os
    self._ip_version = ip_version
    self.logger = logger or cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.connection = None
    self.session_password = None
    self.prompt_pattern = self.PROMPT_PATTERN
    self.last_response = None
    # Track IPs (hops) currently connected to...
    self._conn_path = []
    # Delay between commands when iterating a list of commands
    self._pexpect_cmd_delay = self.STANDARD_CMD_DELAY
def __init__(self, ip_address=None, username='******', password=None,
             key=None, connection_timeout=600, retry_interval=10):
    """Validate, ping, and SSH into the given host."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.connection_timeout = connection_timeout
    # Fail fast on a malformed address
    try:
        IP(ip_address)
    except ValueError:
        raise InvalidAddressFormat(ip_address)
    # Verify the server answers ping before attempting to connect
    PingClient.ping_until_reachable(ip_address,
                                    timeout=connection_timeout,
                                    interval_time=retry_interval)
    if key is not None:
        auth_strategy = SSHAuthStrategy.KEY_STRING
    else:
        auth_strategy = SSHAuthStrategy.PASSWORD
    # Only consult the SSH agent when key auth is in use
    allow_agent = bool(key)
    self.ssh_client = SSHClient(
        username=self.username, password=self.password,
        host=self.ip_address, tcp_timeout=20,
        auth_strategy=auth_strategy, look_for_keys=False, key=key,
        allow_agent=allow_agent)
    self.ssh_client.connect_with_timeout(
        cooldown=20, timeout=connection_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(message.format(
            timeout=connection_timeout, ip_address=ip_address))
def __init__(self, ip_address=None, username='******', password=None,
             key=None, connection_timeout=600, retry_interval=10):
    """Ping the host until it answers, then establish an SSH session."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    if ip_address is None:
        raise ServerUnreachable("None")
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.connection_timeout = connection_timeout
    # Verify the server can be pinged before attempting to connect;
    # give up once the timeout window has elapsed
    deadline_start = int(time.time())
    while True:
        if PingClient.ping(ip_address):
            break
        time.sleep(retry_interval)
        if int(time.time()) - deadline_start >= connection_timeout:
            raise ServerUnreachable(
                'Could not reach the server at {ip_address}'.format(
                    ip_address=ip_address))
    auth_strategy = (SSHAuthStrategy.KEY_STRING if key is not None
                     else SSHAuthStrategy.PASSWORD)
    self.ssh_client = SSHClient(
        username=self.username, password=self.password,
        host=self.ip_address, tcp_timeout=20,
        auth_strategy=auth_strategy, look_for_keys=False, key=key)
    self.ssh_client.connect_with_timeout(
        cooldown=20, timeout=connection_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(message.format(
            timeout=connection_timeout, ip_address=ip_address))
def __init__(self, ip_address=None, server_id=None, username=None,
             password=None, config=None, os_distro=None, key=None):
    """Wait for the server to answer ping, then connect via SSHBehaviors.

    Raises ServerUnreachable when no IP is supplied or ping never
    succeeds within config.connection_timeout; raises
    SshConnectionException when the SSH connection cannot be established.
    """
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    ssh_timeout = config.connection_timeout
    if ip_address is None:
        raise ServerUnreachable("None")
    self.ip_address = ip_address
    self.username = username
    if self.username is None:
        self.username = '******'
    self.password = password
    self.server_id = server_id
    # Poll ping until the host responds.  Break on success so we neither
    # sleep one extra interval nor raise the timeout after a successful
    # ping that arrived near the deadline (previous bug).
    start = int(time.time())
    reachable = False
    while not reachable:
        reachable = PingClient.ping(ip_address,
                                    config.ip_address_version_for_ssh)
        if reachable:
            break
        time.sleep(config.connection_retry_interval)
        if int(time.time()) - start >= config.connection_timeout:
            raise ServerUnreachable(ip_address)
    if key is not None:
        auth_strategy = SSHAuthStrategy.KEY_STRING
    else:
        auth_strategy = SSHAuthStrategy.PASSWORD
    self.ssh_client = SSHBehaviors(
        username=self.username, password=self.password,
        host=self.ip_address, tcp_timeout=20,
        auth_strategy=auth_strategy, look_for_keys=False, key=key)
    self.ssh_client.connect_with_timeout(cooldown=20, timeout=ssh_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(message.format(
            timeout=ssh_timeout, ip_address=ip_address))
def __init__(self, ip_address=None, username='******', password=None,
             key=None, connection_timeout=600, retry_interval=10):
    """Wait for ping, then open an SSH connection to the host."""
    self.client_log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    if ip_address is None:
        raise ServerUnreachable("None")
    self.ip_address = ip_address
    self.username = username
    self.password = password
    self.connection_timeout = connection_timeout
    # Verify the server can be pinged before attempting to connect;
    # abandon the attempt once the timeout window elapses
    started_at = int(time.time())
    while True:
        if PingClient.ping(ip_address):
            break
        time.sleep(retry_interval)
        if int(time.time()) - started_at >= connection_timeout:
            raise ServerUnreachable(ip_address)
    auth_strategy = (SSHAuthStrategy.KEY_STRING if key is not None
                     else SSHAuthStrategy.PASSWORD)
    self.ssh_client = SSHClient(
        username=self.username, password=self.password,
        host=self.ip_address, tcp_timeout=20,
        auth_strategy=auth_strategy, look_for_keys=False, key=key)
    self.ssh_client.connect_with_timeout(
        cooldown=20, timeout=connection_timeout)
    if not self.ssh_client.is_connected():
        message = ('SSH timeout after {timeout} seconds: '
                   'Could not connect to {ip_address}.')
        raise SshConnectionException(message.format(
            timeout=connection_timeout, ip_address=ip_address))
def __init__(self, files):
    """Consolidate multiple (config-like) run files into one BrewFile.

    A BrewFile is a SafeConfigParser file with extra rules, enforced
    during parsing:

    - the section 'cli-defaults' is reserved for defaults of optional
      command-line arguments (NOTE: not yet implemented);
    - keys within any given section must be unique;
    - section names must be unique across all supplied files, except the
      special 'defaults' and 'cli-defaults' sections, which are not
      vetted;
    - 'cli-defaults' should appear at most once across all files.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self.files = files
    # Validation happens up front; the parsed result is kept
    self._data = self._validate_runfiles(files)
class ObjectStorageAPIClient(HTTPClient):
    """HTTP client for an OpenStack Swift (Object Storage) endpoint.

    Wraps account, container and object operations against a single
    storage URL, authenticating every request with the supplied token.
    """

    _log = cclogging.getLogger(__name__)

    def __init__(self, storage_url, auth_token, base_container_name=None,
                 base_object_name=None):
        """
        @param storage_url: account storage URL; the swift endpoint is
            derived from the portion before '/v1/'.
        @param auth_token: token sent as X-Auth-Token on every request.
        @param base_container_name: optional default container name.
        @param base_object_name: optional default object name.
        """
        super(ObjectStorageAPIClient, self).__init__()
        self.engine_config = EngineConfig()
        self.temp_dir = expanduser(self.engine_config.temp_directory)
        self.swift_endpoint = storage_url.split('/v1/')[0]
        self.storage_url = storage_url
        self.auth_token = auth_token
        self.base_container_name = base_container_name or ''
        self.base_object_name = base_object_name or ''
        self.default_headers['X-Auth-Token'] = self.auth_token
        self._swift_features = None

    @_log_transaction(log=_log)
    def request(self, method, url, headers=None, params=None, data=None,
                requestslib_kwargs=None):
        """
        Overrides the HTTPClient's 'request' method, to prevent it from
        calling BaseHTTPClient's 'request' method, so we can provide our
        own logging decorator.

        @param method: HTTP method to use in the request.
        @type method: string
        @param url: URL to make the request to.
        @type url: string
        @param headers: headers to use with the request.
        @type headers: dict
        @param params: query string parameters to use with the request.
        @type params: dict
        @param data: data to send in the request.
        @type data: string
        @param requestslib_kwargs: kwargs to be passed to requests.
        @type requestslib_kwargs: dict
        """
        # set requestslib_kwargs to an empty dict if None
        requestslib_kwargs = requestslib_kwargs if (
            requestslib_kwargs is not None) else {}

        # Set defaults
        params = params if params is not None else {}
        # NOTE(review): TLS certificate verification is disabled for all
        # requests made through this client.
        verify = False

        # If headers are provided by both, headers "wins" over
        # default_headers
        headers = dict(self.default_headers, **(headers or {}))

        # Override url if present in requestslib_kwargs
        if 'url' in requestslib_kwargs.keys():
            url = requestslib_kwargs.get('url', None) or url
            del requestslib_kwargs['url']

        # Override method if present in requestslib_kwargs
        if 'method' in requestslib_kwargs.keys():
            method = requestslib_kwargs.get('method', None) or method
            del requestslib_kwargs['method']

        # The requests lib already removes None key/value pairs, but we
        # force it here in case that behavior ever changes.
        # (Python 2: .keys() returns a list, so deleting while iterating
        # over it is safe here.)
        for key in requestslib_kwargs.keys():
            if requestslib_kwargs[key] is None:
                del requestslib_kwargs[key]

        # Create the final parameters for the call to the base request()
        # Wherever a parameter is provided both by the calling method AND
        # the requests_lib kwargs dictionary, requestslib_kwargs "wins"
        requestslib_kwargs = dict(
            {'headers': headers, 'params': params, 'verify': verify,
             'data': data}, **requestslib_kwargs)

        # Make the request
        return requests.request(method, url, **requestslib_kwargs)

    def get_swift_info(self, headers=None, params=None,
                       requestslib_kwargs=None):
        """
        Returns Swift info.

        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: Swift info
        @rtype: response object
        """
        info_url = '{0}/info'.format(self.swift_endpoint)
        return self.get(info_url, headers=headers, params=params,
                        requestslib_kwargs=requestslib_kwargs)

    # Account----------------------------------------------------------------

    def retrieve_account_metadata(self):
        """HEAD the storage URL; metadata is in the response headers."""
        response = self.head(self.storage_url)
        return response

    @deserialize(AccountContainersList)
    def list_containers(self, headers=None, params=None,
                        requestslib_kwargs=None):
        """
        Lists all containers for the account.

        If the 'format' variable is passed as part of the 'params'
        dictionary, an object representing the deserialized version of
        that format (either xml or json) will be appended to the response
        as the 'entity' attribute. (ie, response.entity)

        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: response object
        @rtype: object
        """
        response = self.get(self.storage_url, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    # Container--------------------------------------------------------------

    def get_container_metadata(self, container_name, headers=None,
                               requestslib_kwargs=None):
        """HEAD a container; metadata is in the response headers."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.head(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def create_container(self, container_name, headers=None,
                         requestslib_kwargs=None):
        """PUT a new (or existing) container."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.put(url, headers=headers,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    def delete_container(self, container_name, headers=None,
                         requestslib_kwargs=None):
        """DELETE a container (must be empty to succeed in Swift)."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.delete(url, headers=headers,
                               requestslib_kwargs=requestslib_kwargs)
        return response

    def set_container_metadata(self, container_name, headers=None,
                               requestslib_kwargs=None):
        """POST metadata headers to a container."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.post(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def get_container_options(self, container_name, headers=None,
                              requestslib_kwargs=None):
        """
        returns response from CORS option call
        """
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.options(url, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response

    @deserialize(ContainerObjectsList)
    def list_objects(self, container_name, headers=None, params=None,
                     requestslib_kwargs=None):
        """
        Lists all objects in the specified container.

        If the 'format' variable is passed as part of the 'params'
        dictionary, an object representing the deserialized version of
        that format (either xml or json) will be appended to the response
        as the 'entity' attribute. (ie, response.entity)

        @param container_name: container to list the object from.
        @type container_name: string
        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: response object
        @rtype: object
        """
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.get(url, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    # Storage Object--------------------------------------------------------

    def get_object(self, container_name, object_name, headers=None,
                   params=None, stream=False, requestslib_kwargs=None):
        """
        optional headers

        If-Match
        If-None-Match
        If-Modified-Since
        If-Unmodified-Since
        Range

        If-Match and If-None-Match check the ETag header
        200 on 'If' header success
        If none of the entity tags match, or if "*" is given and no
        current entity exists, the server MUST NOT perform the requested
        method, and MUST return a 412 (Precondition Failed) response.
        206 (Partial content) for successful range request
        If the entity tag does not match, then the server SHOULD return
        the entire entity using a 200 (OK) response see RFC2616

        If prefetch=False, body download is delayed until
        response.content is accessed either directly, via
        response.iter_content() or .iter_lines()
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        # NOTE(review): the requestslib_kwargs parameter is accepted but
        # never forwarded — the call below always passes only
        # {'stream': stream}. Confirm whether this is intentional.
        response = self.get(url, headers=headers, params=params,
                            requestslib_kwargs={'stream': stream})
        return response

    def create_object(self, container_name, object_name, data=None,
                      headers=None, params=None, requestslib_kwargs=None):
        """
        Creates a storage object in a container via PUT
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.put(url, data=data, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    @deserialize(CreateArchiveObject)
    def create_archive_object(self, data, extract_archive_param,
                              upload_path='', headers=None,
                              requestslib_kwargs=None):
        """
        Extracts an archive to object(s) via PUT to the storage url

        extract-archive param formats: tar, tar.gz, tar.bz2

        upload_path notes:
        given an archive:
            archive
                file1/name1
                file2/name2
                file3
                file4
        if no upload path is given then the filenames in the archive will
        be extracted to container file1 with obj name1, container file2
        with obj name2, etc. and obj names without slashes will be
        ignored.
        if the upload path is 'container_foo' all the objects will be
        extracted to 'container_foo' with obj names
        file1/name1, file2/name2, file3...file_n
        if the upload path is container_foo/bar then the objects will be
        extracted to container_foo with the obj name prefix of 'bar' ie
        bar/file1/name1, bar/file2/name2, bar/file3...bar/file_n
        """
        url = '{0}/{1}'.format(self.storage_url, upload_path)
        params = {"extract-archive": extract_archive_param}
        response = self.put(url, data=data, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    def copy_object(self, container_name, object_name, headers=None,
                    requestslib_kwargs=None):
        """Copies an object via PUT + X-Copy-From or COPY + Destination.

        NOTE(review): despite the default, headers must be a dict
        containing either 'X-Copy-From' or 'Destination'; passing
        headers=None raises a TypeError on the membership test below.
        Returns None when neither header is present.
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        if 'X-Copy-From' in headers:
            method = 'PUT'
            # PUT-style copy requires an explicit zero Content-Length.
            if 'Content-Length' not in headers:
                headers['Content-Length'] = '0'
        elif 'Destination' in headers:
            method = 'COPY'
        else:
            return None
        response = self.request(method=method, url=url, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response

    def delete_object(self, container_name, object_name, headers=None,
                      requestslib_kwargs=None):
        """DELETE a single object from a container."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.delete(url, headers=headers,
                               requestslib_kwargs=requestslib_kwargs)
        return response

    def get_object_metadata(self, container_name, object_name,
                            headers=None, requestslib_kwargs=None):
        """HEAD an object; metadata is in the response headers."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.head(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def set_object_metadata(self, container_name, object_name,
                            headers=None, requestslib_kwargs=None):
        """POST metadata headers to an object."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.post(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def set_temp_url_key(self, headers=None, requestslib_kwargs=None):
        """POST the account-level TempURL key (via headers)."""
        response = self.post(self.storage_url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def create_temp_url(self, method, container, obj, seconds, key):
        """Builds a Swift TempURL signature for the given object.

        @param method: HTTP method the URL will authorize (upper-cased).
        @param container: container holding the object.
        @param obj: object name.
        @param seconds: validity window, added to the current time.
        @param key: account TempURL key used to sign the HMAC-SHA1.
        @return: dict with 'target_url', 'signature' and 'expires'.
        """
        method = method.upper()
        base_url = '{0}/{1}/{2}'.format(self.storage_url, container, obj)
        account_hash = self.storage_url.split('/v1/')[1]
        object_path = '/v1/{0}/{1}/{2}'.format(account_hash, container, obj)
        seconds = int(seconds)
        expires = int(time() + seconds)
        # Signature covers method, expiry and path, per the Swift
        # TempURL middleware contract.
        hmac_body = '{0}\n{1}\n{2}'.format(method, expires, object_path)
        sig = hmac.new(key, hmac_body, sha1).hexdigest()
        return {'target_url': base_url, 'signature': sig,
                'expires': expires}

    def create_formpost(self, container, files, object_prefix='',
                        redirect='http://example.com/formpost',
                        max_file_size=104857600, max_file_count=10,
                        expires=None, key='', signature="",
                        x_delete_at=None, x_delete_after=None):
        """
        Creates RFC-2388 data to be POSTed to the FormPost middleware.

        @param container: Name of the container to post objects to.
        @type container: string
        @param files: Files to post in the form.  The dictionaries
            representing a file should be formatted as follows:
                {
                    'name': '<form name>',
                    'filename': '<filename>',
                    'content_type': '<content_type>',
                    'data': '<filedata>'
                }
            Where only name is required, defaults to other values will be
            as follows:
                filename - the value stored in name.
                content_type - 'text/plain'
                data - the md5 hash of the value stored in name.
        @type files: list of dictionaries
        @param object_prefix: prefix to be used in the name of the
            objects created.
        @type object_prefix: string
        @param redirect: URL to be returned as the 'location' header in
            the HTTP response.
        @type redirect: string
        @param max_file_size: The maximum file size in bytes which can be
            uploaded with the form.
        @type max_file_size: int
        @param max_file_count: The maximum number of files allowed to be
            uploaded with the form.
        @type max_file_count: int
        @param expires: The unix time relating to when the form expires
            and will no longer allow uploads to the container.
        @type expires: int
        @param key: The account's X-Tempurl-Key used in creating the
            signature which authorizes the form to be POSTed.
        @type key: string
        @param signature: The HMAC-SHA1 signature of the form.
        @type signature: string
        @param x_delete_at: The unix time relating to when the object
            will be deleted from the container.
        @type x_delete_at: int
        @param x_delete_after: The amount of time, in seconds, after
            which the object will be deleted from the container.
        @type x_delete_after: int

        @return: Data to be POSTed in the following format:
                {
                    'target_url': '<url to POST to>',
                    'headers': '<headers to be added to the request>,
                    'body': '<body to be posted to the target url>'
                }
        @rtype: dictionary
        """
        base_url, path = self.storage_url.split('/v1')
        path = '/v1{0}/{1}'.format(path, container)
        if object_prefix:
            path = '{0}/{1}'.format(path, object_prefix)
        if not expires:
            # Default validity window of 10 minutes.
            expires = int(time() + 600)
        url = ''.join([base_url, path])
        hmac_body = '{0}\n{1}\n{2}\n{3}\n{4}'.format(path, redirect,
                                                     max_file_size,
                                                     max_file_count,
                                                     expires)
        if not signature:
            signature = hmac.new(key, hmac_body, sha1).hexdigest()
        form = []
        if redirect != '':
            form.append({
                'headers': {
                    'Content-Disposition': 'form-data; name="redirect"'},
                'data': redirect})
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="max_file_size"'},
            'data': str(max_file_size)})
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="max_file_count"'},
            'data': str(max_file_count)})
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="expires"'},
            'data': str(expires)})
        if x_delete_at:
            form.append({
                'headers': {
                    'Content-Disposition': 'form-data; name="x_delete_at"'},
                'data': str(x_delete_at)})
        if x_delete_after:
            form.append({
                'headers': {
                    'Content-Disposition':
                        'form-data; name="x_delete_after"'},
                'data': str(x_delete_after)})
        form.append({
            'headers': {
                'Content-Disposition': 'form-data; name="signature"'},
            'data': signature})
        for data_file in files:
            form_name = data_file.get('name')
            form_filename = data_file.get('filename', form_name)
            form_content_type = data_file.get('content_type', 'text/plain')
            form_data = data_file.get('data', get_md5_hash(form_name))
            form.append({
                'headers': {
                    'Content-Disposition':
                        'form-data; name="{0}"; filename="{1}"'.format(
                            form_name, form_filename),
                    'Content-Type': form_content_type},
                'data': form_data})
        data = []
        boundary = '----WebKitFormBoundary40Q4WaJHO84PBBIa'
        # Serialize each section between boundary markers.
        # NOTE(review): the loop variable 'key' below shadows the 'key'
        # parameter; harmless here because the parameter is no longer
        # used after signing, but worth confirming/renaming.
        for section in form:
            data.append('--{0}\r\n'.format(boundary))
            for key, value in section['headers'].iteritems():
                data.append('{0}: {1}\r\n'.format(key, value))
            data.append('\r\n')
            data.append(section['data'])
            data.append('\r\n')
        data.append('\r\n--{0}'.format(boundary))
        post_headers = {
            'Cache-Control': 'max-age=0',
            'Accept': '*/*;q=0.8',
            'Content-Type': 'multipart/form-data; '
                            'boundary={0}'.format(boundary)}
        return {'target_url': url, 'headers': post_headers,
                'body': ''.join(data)}

    def create_archive(self, object_names, compression_type,
                       archive_name=BULK_ARCHIVE_NAME):
        """
        Bulk creates objects in the opencafe's temp directory specified
        in the engine config.  Each object's data will be the md5sum of
        the object's name.

        @type object_names: list of strings
        @param object_names: a list of object names
        @type compression_type: string
        @param compression_type: file compression to apply to the
            archive (None, "gz" or "bz2")

        @rtype: string
        @return: Returns full path of the archive that was created in
            opencafe's temp directory specified in the engine config
        """
        supported = [None, "gz", "bz2"]
        if compression_type not in supported:
            raise NameError("supported compression: {0}".format(supported))
        ext = ''
        if not compression_type:
            ext = 'tar'
            compression_type = ''
        else:
            ext = 'tar.{0}'.format(compression_type)
        # Randomize the name so repeated calls never collide on disk.
        archive_name = '{0}_{1}.{2}'.format(archive_name,
                                            randstring.get_random_string(),
                                            ext)
        archive_filename = '{0}/{1}'.format(self.temp_dir, archive_name)
        archive = tarfile.open(archive_filename,
                               'w:{0}'.format(compression_type))
        for object_name in object_names:
            # Each member's payload is the md5 of its own name.
            object_data = get_md5_hash(object_name)
            object_size = len(object_data)
            object_time = int(mktime(datetime.now().timetuple()))
            object_buffer = StringIO(object_data)
            object_buffer.seek(0)
            object_info = tarfile.TarInfo(name=object_name)
            object_info.size = object_size
            object_info.mtime = object_time
            archive.addfile(tarinfo=object_info, fileobj=object_buffer)
        archive.close()
        archive_path = "{0}/{1}".format(self.temp_dir, archive_name)
        return archive_path

    def bulk_delete(self, targets, headers=None, requestslib_kwargs=None):
        """
        Deletes container/objects from an account.

        @type targets: list of strings
        @param targets: A list of the '/container/object' or '/container'
            to be bulk deleted.  Note, bulk delete will not remove
            containers that have objects in them, and there is limit of
            1000 containers/objects per delete.

        @rtype: object
        @return: The requests response object returned from the call.
        """
        if not headers:
            headers = {}
        url = '{0}{1}'.format(self.storage_url, '?bulk-delete')
        # Targets are URL-quoted and newline-delimited per the Swift
        # bulk middleware contract.
        data = '\n'.join([urllib.quote(target) for target in targets])
        headers['content-type'] = 'text/plain'
        headers['content-length'] = str(len(data))
        response = self.request('DELETE', url, data=data, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response
def __init__(self):
    """Creates a logger namespaced to this instance's concrete class."""
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
def __init__(self, parent_object):
    """Prepares a root-logger handler namespaced to parent_object.

    The handler is created here but not attached; _is_logging tracks
    whether it is currently installed on the root logger.
    """
    # Root logger: records from all loggers propagate here.
    self.log = getLogger('')
    self.log_handler = setup_new_cchandler(
        get_object_namespace(parent_object))
    self._is_logging = False
def __init__(self):
    """Initializes the model and a logger namespaced to its class."""
    super(AutoMarshallingModel, self).__init__()
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
class XenAPIClient(BaseClient):
    """Thin wrapper around a XenAPI session for Compute-related lookups.

    Translates Compute instance uuids into XenServer VM/VBD/VDI records
    and deserializes them into model objects.
    """

    _log = cclogging.getLogger(__name__)

    def __init__(self, url=None, username=None, password=None):
        """Opens and authenticates a XenAPI session.

        @param url: URL for the target XenServer instance
        @type url: string
        @param username: Username used to connect to XenServer
        @type username: string
        @param password: Password for the provided username
        @type password: string
        """
        self.session = XenAPI.Session(url)
        self.session.xenapi.login_with_password(username, password)

    @_log_failure(log=_log)
    def get_vm_record(self, server_id):
        """Looks up the VM record for a Compute instance uuid.

        @param server_id: The uuid of the Compute instance
        @type server_id: string
        @rtype: VirtualMachine
        """
        vm_ref = self._get_vm_by_compute_id(server_id)
        raw_record = self.session.xenapi.VM.get_record(vm_ref)
        return VirtualMachine._dict_to_obj(**raw_record)

    @_log_failure(log=_log)
    def get_vbd_record(self, vbd):
        """Fetches and deserializes a virtual block device record.

        @param vbd: The OpaqueRef of the VBD
        @type vbd: string
        @rtype: VirtualBlockDevice
        """
        raw_record = self.session.xenapi.VBD.get_record(vbd)
        return VirtualBlockDevice._dict_to_obj(raw_record)

    @_log_failure(log=_log)
    def get_vdi_record(self, vdi):
        """Fetches and deserializes a virtual disk image record.

        @param vdi: The OpaqueRef of the VDI
        @type vdi: string
        @rtype: VirtualDiskImage
        """
        raw_record = self.session.xenapi.VDI.get_record(vdi)
        return VirtualDiskImage._dict_to_obj(raw_record)

    @_log_failure(log=_log)
    def _get_vm_by_compute_id(self, server_id):
        """Finds the VM whose name label matches 'instance-<uuid>'.

        @param server_id: The Compute uuid of an instance
        @type server_id: string
        @rtype: VM OpaqueRef, or None when no VM matches
        """
        target_name = 'instance-{uuid}'.format(uuid=server_id)
        xenapi = self.session.xenapi
        return next(
            (ref for ref in xenapi.VM.get_all()
             if xenapi.VM.get_name_label(ref) == target_name),
            None)
def __init__(self, section_name):
    """Binds this config section wrapper to the named section.

    @param section_name: name of the config section this object reads.
    """
    self._log = cclogging.getLogger(
        cclogging.get_object_namespace(self.__class__))
    self._section_name = section_name
class ObjectStorageAPIClient(HTTPClient):
    """HTTP client for an OpenStack Swift (Object Storage) endpoint.

    Wraps account, container and object operations against a single
    storage URL, authenticating every request with the supplied token.
    """

    _log = cclogging.getLogger(__name__)

    def __init__(self, storage_url, auth_token, base_container_name=None,
                 base_object_name=None):
        """
        @param storage_url: account storage URL; the swift endpoint is
            derived from the portion before '/v1/'.
        @param auth_token: token sent as X-Auth-Token on every request.
        @param base_container_name: optional default container name.
        @param base_object_name: optional default object name.
        """
        super(ObjectStorageAPIClient, self).__init__()
        self.engine_config = EngineConfig()
        self.temp_dir = expanduser(self.engine_config.temp_directory)
        self.swift_endpoint = storage_url.split('/v1/')[0]
        self.storage_url = storage_url
        self.auth_token = auth_token
        self.base_container_name = base_container_name or ''
        self.base_object_name = base_object_name or ''
        self.default_headers['X-Auth-Token'] = self.auth_token
        self._swift_features = None

    @_log_transaction(log=_log)
    def request(self, method, url, headers=None, params=None, data=None,
                requestslib_kwargs=None):
        """
        Overrides the HTTPClient's 'request' method, to prevent it from
        calling BaseHTTPClient's 'request' method, so we can provide our
        own logging decorator.

        @param method: HTTP method to use in the request.
        @type method: string
        @param url: URL to make the request to.
        @type url: string
        @param headers: headers to use with the request.
        @type headers: dict
        @param params: query string parameters to use with the request.
        @type params: dict
        @param data: data to send in the request.
        @type data: string
        @param requestslib_kwargs: kwargs to be passed to requests.
        @type requestslib_kwargs: dict
        """
        # set requestslib_kwargs to an empty dict if None
        requestslib_kwargs = requestslib_kwargs if (
            requestslib_kwargs is not None) else {}

        # Set defaults
        params = params if params is not None else {}
        # NOTE(review): TLS certificate verification is disabled for all
        # requests made through this client.
        verify = False

        # If headers are provided by both, headers "wins" over
        # default_headers
        headers = dict(self.default_headers, **(headers or {}))

        # Coerce all header values to strings, since the requests lib
        # expects string header values.
        for header in headers:
            if not isinstance(headers[header], str):
                headers[header] = str(headers[header])

        # Override url if present in requestslib_kwargs
        if 'url' in requestslib_kwargs.keys():
            url = requestslib_kwargs.get('url', None) or url
            del requestslib_kwargs['url']

        # Override method if present in requestslib_kwargs
        if 'method' in requestslib_kwargs.keys():
            method = requestslib_kwargs.get('method', None) or method
            del requestslib_kwargs['method']

        # The requests lib already removes None key/value pairs, but we
        # force it here in case that behavior ever changes.
        # (Python 2: .keys() returns a list, so deleting while iterating
        # over it is safe here.)
        for key in requestslib_kwargs.keys():
            if requestslib_kwargs[key] is None:
                del requestslib_kwargs[key]

        # Create the final parameters for the call to the base request()
        # Wherever a parameter is provided both by the calling method AND
        # the requests_lib kwargs dictionary, requestslib_kwargs "wins"
        requestslib_kwargs = dict(
            {'headers': headers, 'params': params, 'verify': verify,
             'data': data}, **requestslib_kwargs)

        # Make the request
        return requests.request(method, url, **requestslib_kwargs)

    def get_swift_info(self, headers=None, params=None,
                       requestslib_kwargs=None):
        """
        Returns Swift info.

        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: Swift info
        @rtype: response object
        """
        info_url = '{0}/info'.format(self.swift_endpoint)
        return self.get(info_url, headers=headers, params=params,
                        requestslib_kwargs=requestslib_kwargs)

    def health_check(self, headers=None, params=None,
                     requestslib_kwargs=None):
        """
        Returns Health Check.

        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: response object
        @rtype: object
        """
        # Healthcheck lives at the host root, not under the account
        # path, so rebuild the URL from the storage URL's scheme/host.
        parsed_url = urlparse(self.storage_url)
        health_url = "{0}://{1}/healthcheck".format(parsed_url.scheme,
                                                    parsed_url.netloc)
        return self.get(health_url, headers=headers, params=params,
                        requestslib_kwargs=requestslib_kwargs)

    # Account----------------------------------------------------------------

    def get_account_metadata(self):
        """HEAD the storage URL; metadata is in the response headers."""
        response = self.head(self.storage_url)
        return response

    @deserialize(AccountContainersList)
    def list_containers(self, headers=None, params=None,
                        requestslib_kwargs=None):
        """
        Lists all containers for the account.

        If the 'format' variable is passed as part of the 'params'
        dictionary, an object representing the deserialized version of
        that format (either xml or json) will be appended to the response
        as the 'entity' attribute. (ie, response.entity)

        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: response object
        @rtype: object
        """
        response = self.get(self.storage_url, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    # Container--------------------------------------------------------------

    def get_container_metadata(self, container_name, headers=None,
                               requestslib_kwargs=None):
        """HEAD a container; metadata is in the response headers."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.head(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def create_container(self, container_name, headers=None,
                         requestslib_kwargs=None):
        """PUT a new (or existing) container."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.put(url, headers=headers,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    def delete_container(self, container_name, headers=None,
                         requestslib_kwargs=None):
        """DELETE a container (must be empty to succeed in Swift)."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.delete(url, headers=headers,
                               requestslib_kwargs=requestslib_kwargs)
        return response

    def set_container_metadata(self, container_name, headers=None,
                               requestslib_kwargs=None):
        """POST metadata headers to a container."""
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.post(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def get_container_options(self, container_name, headers=None,
                              requestslib_kwargs=None):
        """
        returns response from CORS option call
        """
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.options(url, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response

    @deserialize(ContainerObjectsList)
    def list_objects(self, container_name, headers=None, params=None,
                     requestslib_kwargs=None):
        """
        Lists all objects in the specified container.

        If the 'format' variable is passed as part of the 'params'
        dictionary, an object representing the deserialized version of
        that format (either xml or json) will be appended to the response
        as the 'entity' attribute. (ie, response.entity)

        @param container_name: container to list the object from.
        @type container_name: string
        @param headers: headers to be added to the HTTP request.
        @type headers: dictionary
        @param params: query string parameters to be added to the HTTP
            request.
        @type params: dictionary
        @param requestslib_kwargs: keyword arguments to be passed on to
            python requests.
        @type requestslib_kwargs: dictionary

        @return: response object
        @rtype: object
        """
        url = '{0}/{1}'.format(self.storage_url, container_name)
        response = self.get(url, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    # Storage Object--------------------------------------------------------

    def get_object(self, container_name, object_name, headers=None,
                   params=None, stream=False, requestslib_kwargs=None):
        """
        optional headers

        If-Match
        If-None-Match
        If-Modified-Since
        If-Unmodified-Since
        Range

        If-Match and If-None-Match check the ETag header
        200 on 'If' header success
        If none of the entity tags match, or if "*" is given and no
        current entity exists, the server MUST NOT perform the requested
        method, and MUST return a 412 (Precondition Failed) response.
        206 (Partial content) for successful range request
        If the entity tag does not match, then the server SHOULD return
        the entire entity using a 200 (OK) response see RFC2616

        If prefetch=False, body download is delayed until
        response.content is accessed either directly, via
        response.iter_content() or .iter_lines()
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        # NOTE(review): the requestslib_kwargs parameter is accepted but
        # never forwarded — the call below always passes only
        # {'stream': stream}. Confirm whether this is intentional.
        response = self.get(url, headers=headers, params=params,
                            requestslib_kwargs={'stream': stream})
        return response

    def create_object(self, container_name, object_name, data=None,
                      headers=None, params=None, requestslib_kwargs=None):
        """
        Creates a storage object in a container via PUT
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.put(url, data=data, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    @deserialize(CreateArchiveObject)
    def create_archive_object(self, data, extract_archive_param,
                              upload_path='', headers=None,
                              requestslib_kwargs=None):
        """
        Extracts an archive to object(s) via PUT to the storage url

        extract-archive param formats: tar, tar.gz, tar.bz2

        upload_path notes:
        given an archive:
            archive
                file1/name1
                file2/name2
                file3
                file4
        if no upload path is given then the filenames in the archive will
        be extracted to container file1 with obj name1, container file2
        with obj name2, etc. and obj names without slashes will be
        ignored.
        if the upload path is 'container_foo' all the objects will be
        extracted to 'container_foo' with obj names
        file1/name1, file2/name2, file3...file_n
        if the upload path is container_foo/bar then the objects will be
        extracted to container_foo with the obj name prefix of 'bar' ie
        bar/file1/name1, bar/file2/name2, bar/file3...bar/file_n
        """
        url = '{0}/{1}'.format(self.storage_url, upload_path)
        params = {"extract-archive": extract_archive_param}
        response = self.put(url, data=data, headers=headers, params=params,
                            requestslib_kwargs=requestslib_kwargs)
        return response

    def copy_object(self, container_name, object_name, headers=None,
                    requestslib_kwargs=None):
        """Copies an object via PUT + X-Copy-From or COPY + Destination.

        NOTE(review): despite the default, headers must be a dict
        containing either 'X-Copy-From' or 'Destination'; passing
        headers=None raises a TypeError on the membership test below.
        Returns None when neither header is present.
        """
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        if 'X-Copy-From' in headers:
            method = 'PUT'
            # PUT-style copy requires an explicit zero Content-Length.
            if 'Content-Length' not in headers:
                headers['Content-Length'] = '0'
        elif 'Destination' in headers:
            method = 'COPY'
        else:
            return None
        response = self.request(method=method, url=url, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response

    def delete_object(self, container_name, object_name, headers=None,
                      requestslib_kwargs=None):
        """DELETE a single object from a container."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.delete(url, headers=headers,
                               requestslib_kwargs=requestslib_kwargs)
        return response

    def get_object_metadata(self, container_name, object_name,
                            headers=None, requestslib_kwargs=None):
        """HEAD an object; metadata is in the response headers."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.head(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def set_object_metadata(self, container_name, object_name, headers,
                            requestslib_kwargs=None):
        """POST metadata headers to an object (headers are required)."""
        url = '{0}/{1}/{2}'.format(self.storage_url, container_name,
                                   object_name)
        response = self.post(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def set_temp_url_key(self, container_name=None, headers=None,
                         requestslib_kwargs=None):
        """
        optional container name is for setting the tempurl at the
        container level, otherwise key is set at the account level.
        """
        url = self.storage_url
        if container_name:
            url = "{0}/{1}".format(self.storage_url, container_name)
        response = self.post(url, headers=headers,
                             requestslib_kwargs=requestslib_kwargs)
        return response

    def create_temp_url(self, method, container, obj, seconds, key):
        """Builds a Swift TempURL signature for the given object.

        @param method: HTTP method the URL will authorize (upper-cased).
        @param container: container holding the object.
        @param obj: object name.
        @param seconds: validity window, added to the current time.
        @param key: TempURL key used to sign the HMAC-SHA1.
        @return: dict with 'target_url', 'signature' and 'expires'.
        """
        method = method.upper()
        base_url = '{0}/{1}/{2}'.format(self.storage_url, container, obj)
        account_hash = self.storage_url.split('/v1/')[1]
        object_path = '/v1/{0}/{1}/{2}'.format(account_hash, container, obj)
        seconds = int(seconds)
        expires = int(time() + seconds)
        # Signature covers method, expiry and path, per the Swift
        # TempURL middleware contract.
        hmac_body = '{0}\n{1}\n{2}'.format(method, expires, object_path)
        sig = hmac.new(key, hmac_body, sha1).hexdigest()
        return {'target_url': base_url, 'signature': sig,
                'expires': expires}

    def create_archive(self, object_names, compression_type,
                       archive_name=BULK_ARCHIVE_NAME):
        """
        Bulk creates objects in the opencafe's temp directory specified
        in the engine config.  Each object's data will be the md5sum of
        the object's name.

        @type object_names: list of strings
        @param object_names: a list of object names
        @type compression_type: string
        @param compression_type: file compression to apply to the
            archive (None, "gz" or "bz2")

        @rtype: string
        @return: Returns full path of the archive that was created in
            opencafe's temp directory specified in the engine config
        """
        supported = [None, "gz", "bz2"]
        if compression_type not in supported:
            raise NameError("supported compression: {0}".format(supported))
        ext = ''
        if not compression_type:
            ext = 'tar'
            compression_type = ''
        else:
            ext = 'tar.{0}'.format(compression_type)
        # Randomize the name so repeated calls never collide on disk.
        archive_name = '{0}_{1}.{2}'.format(archive_name,
                                            randstring.get_random_string(),
                                            ext)
        archive_filename = '{0}/{1}'.format(self.temp_dir, archive_name)
        archive = tarfile.open(archive_filename,
                               'w:{0}'.format(compression_type))
        for object_name in object_names:
            # Each member's payload is the md5 of its own name.
            object_data = get_md5_hash(object_name)
            object_size = len(object_data)
            object_time = int(mktime(datetime.now().timetuple()))
            object_buffer = StringIO(object_data)
            object_buffer.seek(0)
            object_info = tarfile.TarInfo(name=object_name)
            object_info.size = object_size
            object_info.mtime = object_time
            archive.addfile(tarinfo=object_info, fileobj=object_buffer)
        archive.close()
        archive_path = "{0}/{1}".format(self.temp_dir, archive_name)
        return archive_path

    def bulk_delete(self, targets, headers=None, requestslib_kwargs=None):
        """
        Deletes container/objects from an account.

        @type targets: list of strings
        @param targets: A list of the '/container/object' or '/container'
            to be bulk deleted.  Note, bulk delete will not remove
            containers that have objects in them, and there is limit of
            1000 containers/objects per delete.

        @rtype: object
        @return: The requests response object returned from the call.
        """
        if not headers:
            headers = {}
        url = '{0}{1}'.format(self.storage_url, '?bulk-delete')
        # Targets are URL-quoted and newline-delimited per the Swift
        # bulk middleware contract.
        data = '\n'.join([urllib.quote(target) for target in targets])
        headers['content-type'] = 'text/plain'
        headers['content-length'] = str(len(data))
        response = self.request('DELETE', url, data=data, headers=headers,
                                requestslib_kwargs=requestslib_kwargs)
        return response
class BaseHTTPClient(BaseClient): """Re-implementation of Requests' api.py that removes many assumptions. Adds verbose logging. Adds support for response-code based exception injection. (Raising exceptions based on response code) @see: http://docs.python-requests.org/en/latest/api/#configurations """ _exception_handlers = [] _log = cclogging.getLogger(__name__) def __init__(self): super(BaseHTTPClient, self).__init__() @_inject_exception(_exception_handlers) @_log_transaction(log=_log) def request(self, method, url, **kwargs): """ Performs <method> HTTP request to <url> using the requests lib""" return requests.request(method, url, **kwargs) def put(self, url, **kwargs): """ HTTP PUT request """ return self.request('PUT', url, **kwargs) def copy(self, url, **kwargs): """ HTTP COPY request """ return self.request('COPY', url, **kwargs) def post(self, url, data=None, **kwargs): """ HTTP POST request """ return self.request('POST', url, data=data, **kwargs) def get(self, url, **kwargs): """ HTTP GET request """ return self.request('GET', url, **kwargs) def head(self, url, **kwargs): """ HTTP HEAD request """ return self.request('HEAD', url, **kwargs) def delete(self, url, **kwargs): """ HTTP DELETE request """ return self.request('DELETE', url, **kwargs) def options(self, url, **kwargs): """ HTTP OPTIONS request """ return self.request('OPTIONS', url, **kwargs) def patch(self, url, **kwargs): """ HTTP PATCH request """ return self.request('PATCH', url, **kwargs) @classmethod def add_exception_handler(cls, handler): """Adds a specific L{ExceptionHandler} to the HTTP client @warning: SHOULD ONLY BE CALLED FROM A PROVIDER THROUGH A TEST FIXTURE """ cls._exception_handlers.append(handler) @classmethod def delete_exception_handler(cls, handler): """Removes a L{ExceptionHandler} from the HTTP client @warning: SHOULD ONLY BE CALLED FROM A PROVIDER THROUGH A TEST FIXTURE """ if handler in cls._exception_handlers: cls._exception_handlers.remove(handler)
def generate_rsa_ssh_keys(cls, keyfile_name=None, keyfile_path=None, key_size=1024, pass_phrase=""): """ Generates rsa keys """ engine_config = EngineConfig() _log = cclogging.getLogger(__name__) _log.debug( "Creating RSA keys with name: {0} inside folder: {1}".format( keyfile_name, keyfile_path)) # Build the key file names and path if keyfile_name is None: keyfile_name = "test_ssh_key_{0}".format( str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))) if keyfile_path is None: keyfile_path = engine_config.temp_directory pub_keyfile_path = os.path.join(keyfile_path, "{0}.pub".format(keyfile_name)) private_key_file_path = os.path.join(keyfile_path, keyfile_name) # If the key files already exist, remove them if os.path.isfile(private_key_file_path): os.remove(private_key_file_path) if os.path.isfile(pub_keyfile_path): os.remove(pub_keyfile_path) try: # Generate the keys private_key = RSA.generate(key_size) public_key = private_key.publickey() except ValueError as msg: _log.error("Key Generate exception: \n {0}".format(msg)) return SSHKeyResponse(error=msg) try: # Create the key files and write the keys onto them with open(pub_keyfile_path, "w") as public_key_file: public_key_file.write( public_key.exportKey(passphrase=pass_phrase)) if not os.path.isfile(pub_keyfile_path): return SSHKeyResponse(error="No public key file created") with open(private_key_file_path, "w") as private_key_file: private_key_file.write( private_key.exportKey(passphrase=pass_phrase)) if not os.path.isfile(private_key_file_path): return SSHKeyResponse(error="No private key file created") else: os.chmod(private_key_file_path, 0o700) return SSHKeyResponse( public_key=pub_keyfile_path, private_key=private_key_file_path) except IOError as err: try: errno, strerror = err _log.error("I/O error({0}): {1}".format( errno, strerror)) return SSHKeyResponse(error=strerror) except: return SSHKeyResponse(error=str(err))
class BaseMongoClient(BaseClient):
    """
    @summary: Designed to be a simple interface to make calls to MongoDB
    """
    FAILED = 'failed'
    SUCCESS = 'success'
    _log = cclogging.getLogger(__name__)

    def __init__(self, hostname, db_name, username, password):
        self.hostname = hostname
        self.db_name = db_name
        self.username = username
        self.password = password
        # Populated by connect(); None until a connection is made.
        self.connection = None
        self.db = None

    @classmethod
    def from_connection_string(cls, uri):
        """
        @summary: Alternate constructor that parses a MongoDB URI.
        @param uri: connection string, e.g. mongodb://user:pw@host/db
        @return: a client built from the first host in the URI
        """
        params = pymongo.uri_parser.parse_uri(uri)
        hosts = params.get('nodelist')
        if not hosts:
            raise Exception("Invalid connection string: {uri}".format(
                uri=uri))
        host, port = hosts[0]
        return cls(hostname=host, db_name=params.get('database'),
                   username=params.get('username'),
                   password=params.get('password'))

    def is_connected(self):
        """@summary: True if connect() succeeded and the server responds."""
        if self.connection:
            return self.connection.alive()
        return False

    def connect(self, hostname=None, db_name=None):
        """
        @summary: Connects to a server, but does not authenticate.
        @param hostname: if not specified it'll attempt to use init hostname
        @param db_name: if not specified it'll attempt to use init db_name
        @return: SUCCESS if connected, FAILED otherwise
        """
        if hostname is None:
            hostname = self.hostname
        if db_name is None:
            db_name = self.db_name
        self.connection = MongoClient(hostname)
        self.db = self.connection[db_name]
        result = 'Connected' if self.is_connected() else 'Failed to connect'
        self._log.debug('{0} to MongoDB: {1}'.format(result, hostname))
        return self.SUCCESS if self.is_connected() else self.FAILED

    def disconnect(self):
        """@summary: Closes the active connection."""
        self.connection.close()
        self._log.debug('Disconnected from MongoDB')

    def auth(self, username=None, password=None):
        """
        @summary: Attempts to auth with a connected db. Returns FAILED if
        there isn't an active connection.
        @param username: if not specified it'll attempt to use init username
        @param password: if not specified it'll attempt to use init password
        @return: SUCCESS or FAILED
        """
        if not self.is_connected():
            return self.FAILED
        if username is None:
            username = self.username
        if password is None:
            password = self.password
        if username and password:
            self.db.authenticate(name=username, password=password)
        return self.SUCCESS

    def find_one(self, db_obj_name, filter=None):
        """
        @summary: Finds a single document in the named collection.
        @param filter: query dict; defaults to match-anything. The
            default is None instead of dict() to avoid sharing one
            mutable dict object across every call.
        """
        if not self.is_connected():
            return self.FAILED
        if filter is None:
            filter = dict()
        db_obj = self.db[db_obj_name]
        return db_obj.find_one(filter)

    def delete(self, db_obj_name, filter=None, just_one=True):
        """
        @summary: Removes matching documents from the named collection.
        @param filter: query dict; defaults to match-anything. The
            default is None instead of dict() to avoid sharing one
            mutable dict object across every call.
        @param just_one: remove only the first match when True
        """
        if not self.is_connected():
            return self.FAILED
        if filter is None:
            filter = dict()
        db_obj = self.db[db_obj_name]
        return db_obj.remove(filter, just_one)
class BaseHTTPClient(BaseClient):
    """Re-implementation of Requests' api.py that removes many assumptions.
    Adds verbose logging.
    Adds support for response-code based exception injection.
    (Raising exceptions based on response code)

    @see: http://docs.python-requests.org/en/latest/api/#configurations
    """
    _exception_handlers = []
    _log = cclogging.getLogger(__name__)

    def __init__(self):
        self.__config = HTTPPluginConfig()
        super(BaseHTTPClient, self).__init__()

    @_inject_exception(_exception_handlers)
    @_log_transaction(log=_log)
    def request(self, method, url, **kwargs):
        """ Performs <method> HTTP request to <url> using the requests lib

        Transport-level requests exceptions are retried per the plugin
        config; the final failure is always re-raised instead of being
        swallowed (the previous version fell out of the loop and
        silently returned None when all retries were exhausted).
        """
        retries = self.__config.retries_on_requests_exceptions
        # We always allow one attempt, retries are configured via
        # EngineConfig
        allowed_attempts = 1 + retries

        # Offsetting xrange range by one to allow proper reporting of which
        # attempt we are on.
        for attempt in six.moves.xrange(1, allowed_attempts + 1):
            try:
                return requests.request(method, url, **kwargs)
            except (ConnectionError, HTTPError, Timeout,
                    TooManyRedirects) as e:
                if not retries:
                    raise e
                warning_string = (
                    'Request Lib Error: Attempt {attempt} of '
                    '{allowed_attempts}\n'.format(
                        attempt=attempt,
                        allowed_attempts=allowed_attempts))
                warn(warning_string)
                warn(e)
                warn('\n')
                self._log.critical(warning_string)
                self._log.exception(e)
                # Bug fix: when the final allowed attempt fails, the
                # exception must propagate to the caller.
                if attempt >= allowed_attempts:
                    raise e

    def put(self, url, **kwargs):
        """ HTTP PUT request """
        return self.request('PUT', url, **kwargs)

    def copy(self, url, **kwargs):
        """ HTTP COPY request """
        return self.request('COPY', url, **kwargs)

    def post(self, url, data=None, **kwargs):
        """ HTTP POST request """
        return self.request('POST', url, data=data, **kwargs)

    def get(self, url, **kwargs):
        """ HTTP GET request """
        return self.request('GET', url, **kwargs)

    def head(self, url, **kwargs):
        """ HTTP HEAD request """
        return self.request('HEAD', url, **kwargs)

    def delete(self, url, **kwargs):
        """ HTTP DELETE request """
        return self.request('DELETE', url, **kwargs)

    def options(self, url, **kwargs):
        """ HTTP OPTIONS request """
        return self.request('OPTIONS', url, **kwargs)

    def patch(self, url, **kwargs):
        """ HTTP PATCH request """
        return self.request('PATCH', url, **kwargs)

    @classmethod
    def add_exception_handler(cls, handler):
        """Adds a specific L{ExceptionHandler} to the HTTP client

        @warning: SHOULD ONLY BE CALLED FROM A PROVIDER THROUGH A TEST
        FIXTURE
        """
        cls._exception_handlers.append(handler)

    @classmethod
    def delete_exception_handler(cls, handler):
        """Removes a L{ExceptionHandler} from the HTTP client

        @warning: SHOULD ONLY BE CALLED FROM A PROVIDER THROUGH A TEST
        FIXTURE
        """
        if handler in cls._exception_handlers:
            cls._exception_handlers.remove(handler)
def _log(cls): return cclogging.getLogger(cclogging.get_object_namespace(cls))