def decode(key, enc_type, data):
    LOG.debug("Getting encoded data for key=%s, enc=%s", key, enc_type)

    raw_data = None

    if enc_type in ["gzip+base64", "gz+b64"]:
        LOG.debug("Decoding %s type of %s", enc_type, key)
        raw_data = util.decomp_gzip(util.b64d(data))
    elif enc_type in ["base64", "b64"]:
        LOG.debug("Decoding %s type of %s", enc_type, key)
        raw_data = util.b64d(data)
    else:
        LOG.debug("Plain-text data %s", key)
        raw_data = data

    return util.decode_binary(raw_data)
def convert_jinja_instance_data(data, prefix='', sep='/', decode_paths=()):
    """Process instance-data.json dict for use in jinja templates.

    Replace hyphens with underscores for jinja templates and decode any
    base64_encoded_keys.
    """
    result = {}
    decode_paths = [path.replace('-', '_') for path in decode_paths]
    for key, value in sorted(data.items()):
        if '-' in key:
            # Standardize keys for use in #cloud-config/shell templates
            key = key.replace('-', '_')
        key_path = '{0}{1}{2}'.format(prefix, sep, key) if prefix else key
        if key_path in decode_paths:
            value = b64d(value)
        if isinstance(value, dict):
            result[key] = convert_jinja_instance_data(
                value, key_path, sep=sep, decode_paths=decode_paths)
            if re.match(r'v\d+', key):
                # Copy values to top-level aliases
                for subkey, subvalue in result[key].items():
                    result[subkey] = subvalue
        else:
            result[key] = value
    return result
def _get_value_from_frame(self, expected_request_id, frame):
    frame_data = self.line_regex.match(frame).groupdict()
    if int(frame_data["length"]) != len(frame_data["body"]):
        raise JoyentMetadataFetchException(
            "Incorrect frame length given ({0} != {1}).".format(
                frame_data["length"], len(frame_data["body"])
            )
        )
    expected_checksum = self._checksum(frame_data["body"])
    if frame_data["checksum"] != expected_checksum:
        raise JoyentMetadataFetchException(
            "Invalid checksum (expected: {0}; got {1}).".format(
                expected_checksum, frame_data["checksum"]
            )
        )
    if frame_data["request_id"] != expected_request_id:
        raise JoyentMetadataFetchException(
            "Request ID mismatch (expected: {0}; got {1}).".format(
                expected_request_id, frame_data["request_id"]
            )
        )
    if not frame_data.get("payload", None):
        LOG.debug("No value found.")
        return None
    value = util.b64d(frame_data["payload"])
    LOG.debug('Value "%s" found.', value)
    return value
def convert_jinja_instance_data(
    data, prefix='', sep='/', decode_paths=(), include_key_aliases=False
):
    """Process instance-data.json dict for use in jinja templates.

    Replace hyphens with underscores for jinja templates and decode any
    base64_encoded_keys.
    """
    result = {}
    decode_paths = [path.replace('-', '_') for path in decode_paths]
    for key, value in sorted(data.items()):
        key_path = '{0}{1}{2}'.format(prefix, sep, key) if prefix else key
        if key_path in decode_paths:
            value = b64d(value)
        if isinstance(value, dict):
            result[key] = convert_jinja_instance_data(
                value,
                key_path,
                sep=sep,
                decode_paths=decode_paths,
                include_key_aliases=include_key_aliases
            )
            if re.match(r'v\d+$', key):
                # Copy values to top-level aliases
                for subkey, subvalue in result[key].items():
                    result[subkey] = copy.deepcopy(subvalue)
        else:
            result[key] = value
        if include_key_aliases:
            alias_name = get_jinja_variable_alias(key)
            if alias_name:
                result[alias_name] = copy.deepcopy(result[key])
    return result
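# Hedged usage sketch, not part of cloud-init itself: assumes the
# convert_jinja_instance_data definition above is in scope in the same
# module, and supplies minimal stand-ins for its helpers (b64d,
# get_jinja_variable_alias) plus the re/copy imports it relies on.
import base64
import copy  # used by convert_jinja_instance_data above
import re    # used by convert_jinja_instance_data above


def b64d(value):
    # Stand-in for cloud-init's util.b64d, simplified to assume utf-8 output.
    return base64.b64decode(value).decode()


def get_jinja_variable_alias(key):
    # Stand-in: alias hyphenated keys to underscore names, a rough
    # approximation of cloud-init's jinja-safe key aliasing.
    return key.replace('-', '_') if '-' in key else None


data = {
    'v1': {'instance-id': 'i-0123'},
    'ds': {'userdata': base64.b64encode(b'#!/bin/sh\n').decode()},
}
rendered = convert_jinja_instance_data(
    data, decode_paths=('ds/userdata',), include_key_aliases=True)
# The base64 path is decoded, hyphenated keys gain underscore aliases,
# and the contents of 'v1' are also copied to top-level keys.
assert rendered['ds']['userdata'].startswith('#!/bin/sh')
assert rendered['v1']['instance_id'] == 'i-0123'
assert rendered['instance_id'] == 'i-0123'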
def configure(self, passwd, resetPasswd, distro):
    """
    Main method to perform all password configuration based on
    configuration file inputs.
    @param passwd: encoded admin password.
    @param resetPasswd: boolean to determine if password needs to be reset.
    @return cfg: dict to be used by cloud-init set_passwd code.
    """
    LOG.info('Starting password configuration')
    if passwd:
        passwd = util.b64d(passwd)

    allRootUsers = []
    for line in open('/etc/passwd', 'r'):
        if line.split(':')[2] == '0':
            allRootUsers.append(line.split(':')[0])

    # Read the shadow file and keep each user that is uid 0 (root or
    # root-equivalent) according to /etc/passwd.
    uidUsersList = []
    for line in open('/etc/shadow', 'r'):
        user = line.split(':')[0]
        if user in allRootUsers:
            uidUsersList.append(user)

    if passwd:
        LOG.info('Setting admin password')
        distro.set_passwd('root', passwd)
    if resetPasswd:
        self.reset_password(uidUsersList)
    LOG.info('Configure Password completed!')
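# Hedged sketch: shows, on hypothetical /etc/passwd-style lines, the uid-0
# filter used above to collect root-equivalent account names.
passwd_lines = [
    "root:x:0:0:root:/root:/bin/bash",
    "toor:x:0:0:alt root:/root:/bin/sh",
    "daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin",
]
all_root_users = [line.split(":")[0]
                  for line in passwd_lines if line.split(":")[2] == "0"]
assert all_root_users == ["root", "toor"]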
def decode(key, enc_type, data):
    """
    decode returns the decoded string value of data

    key is a string used to identify the data being decoded in log messages
    """
    LOG.debug("Getting encoded data for key=%s, enc=%s", key, enc_type)

    raw_data = None

    if enc_type in ["gzip+base64", "gz+b64"]:
        LOG.debug("Decoding %s format %s", enc_type, key)
        raw_data = util.decomp_gzip(util.b64d(data))
    elif enc_type in ["base64", "b64"]:
        LOG.debug("Decoding %s format %s", enc_type, key)
        raw_data = util.b64d(data)
    else:
        LOG.debug("Plain-text data %s", key)
        raw_data = data

    return util.decode_binary(raw_data)
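# Hedged sketch (not part of the source above): shows, with only the
# stdlib, what the "gz+b64" and "b64" enc_type branches of decode() undo.
# util.b64d and util.decomp_gzip are assumed to behave like
# base64.b64decode and gzip.decompress here.
import base64
import gzip

plain = b"some metadata value"

b64_value = base64.b64encode(plain).decode()                    # enc_type "b64"
gz_b64_value = base64.b64encode(gzip.compress(plain)).decode()  # enc_type "gz+b64"

assert base64.b64decode(b64_value) == plain
assert gzip.decompress(base64.b64decode(gz_b64_value)) == plain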
def query_data(noun, seed_device, seed_timeout, strip=False, default=None,
               b64=None):
    """Makes a request via the serial console with "GET <NOUN>".

    In the response, the first line is the status and subsequent lines
    are the value. A blank line with a "." is used to indicate the end
    of the response.

    If the response is expected to be base64 encoded, then set b64 to
    True. Unfortunately, there is no way to know whether a value is
    encoded, so this method relies on being told if the data is base64
    or not.
    """
    if not noun:
        return False

    with contextlib.closing(get_serial(seed_device, seed_timeout)) as ser:
        client = JoyentMetadataClient(ser)
        response = client.get_metadata(noun)

    if response is None:
        return default

    if b64 is None:
        b64 = query_data('b64-%s' % noun, seed_device=seed_device,
                         seed_timeout=seed_timeout, b64=False,
                         default=False, strip=True)
        b64 = util.is_true(b64)

    resp = None
    if b64 or strip:
        resp = "".join(response).rstrip()
    else:
        resp = "".join(response)

    if b64:
        try:
            return util.b64d(resp)
        # Bogus input produces different errors in Python 2 and 3; catch both.
        except (TypeError, binascii.Error):
            LOG.warn("Failed base64 decoding key '%s'", noun)
            return resp

    return resp
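# Hedged sketch: why query_data catches both TypeError and binascii.Error
# above. On Python 3, bogus base64 input raises binascii.Error, while
# Python 2's base64 module raised TypeError for the same input, so both
# are caught.
import base64
import binascii

try:
    base64.b64decode("not valid base64!!", validate=True)
except (TypeError, binascii.Error) as exc:
    print("decode failed:", exc)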
def _get_value_from_frame(self, expected_request_id, frame):
    frame_data = self.line_regex.match(frame).groupdict()
    if int(frame_data['length']) != len(frame_data['body']):
        raise JoyentMetadataFetchException(
            'Incorrect frame length given ({0} != {1}).'.format(
                frame_data['length'], len(frame_data['body'])))
    expected_checksum = self._checksum(frame_data['body'])
    if frame_data['checksum'] != expected_checksum:
        raise JoyentMetadataFetchException(
            'Invalid checksum (expected: {0}; got {1}).'.format(
                expected_checksum, frame_data['checksum']))
    if frame_data['request_id'] != expected_request_id:
        raise JoyentMetadataFetchException(
            'Request ID mismatch (expected: {0}; got {1}).'.format(
                expected_request_id, frame_data['request_id']))
    if not frame_data.get('payload', None):
        LOG.debug('No value found.')
        return None
    value = util.b64d(frame_data['payload'])
    LOG.debug('Value "%s" found.', value)
    return value
def read_context_disk_dir(source_dir, distro, asuser=None):
    """
    read_context_disk_dir(source_dir):
    read source_dir and return a tuple with metadata dict and user-data
    string populated. If not a valid dir, raise a NonContextDiskDir
    """
    found = {}
    for af in CONTEXT_DISK_FILES:
        fn = os.path.join(source_dir, af)
        if os.path.isfile(fn):
            found[af] = fn

    if not found:
        raise NonContextDiskDir("%s: %s" % (source_dir, "no files found"))

    context = {}
    results = {'userdata': None, 'metadata': {}}

    if "context.sh" in found:
        if asuser is not None:
            try:
                pwd.getpwnam(asuser)
            except KeyError as e:
                raise BrokenContextDiskDir(
                    "configured user '{user}' does not exist".format(
                        user=asuser)
                ) from e
        try:
            path = os.path.join(source_dir, 'context.sh')
            content = util.load_file(path)
            context = parse_shell_config(content, asuser=asuser)
        except subp.ProcessExecutionError as e:
            raise BrokenContextDiskDir(
                "Error processing context.sh: %s" % (e)
            ) from e
        except IOError as e:
            raise NonContextDiskDir(
                "Error reading context.sh: %s" % (e)
            ) from e
    else:
        raise NonContextDiskDir("Missing context.sh")

    if not context:
        return results

    results['metadata'] = context

    # process single or multiple SSH keys
    ssh_key_var = None
    if "SSH_KEY" in context:
        ssh_key_var = "SSH_KEY"
    elif "SSH_PUBLIC_KEY" in context:
        ssh_key_var = "SSH_PUBLIC_KEY"

    if ssh_key_var:
        lines = context.get(ssh_key_var).splitlines()
        results['metadata']['public-keys'] = [
            line for line in lines if len(line) and not line.startswith("#")
        ]

    # custom hostname -- try hostname, or let cloud-init itself
    # create the hostname from an IP address later
    for k in ('HOSTNAME', 'PUBLIC_IP', 'IP_PUBLIC', 'ETH0_IP'):
        if k in context:
            results['metadata']['local-hostname'] = context[k]
            break

    # raw user data
    if "USER_DATA" in context:
        results['userdata'] = context["USER_DATA"]
    elif "USERDATA" in context:
        results['userdata'] = context["USERDATA"]

    # b64decode user data if necessary (default)
    if 'userdata' in results:
        encoding = context.get('USERDATA_ENCODING',
                               context.get('USER_DATA_ENCODING'))
        if encoding == "base64":
            try:
                results['userdata'] = util.b64d(results['userdata'])
            except TypeError:
                LOG.warning("Failed base64 decoding of userdata")

    # generate Network Configuration v2
    # only if there are any required context variables
    # http://docs.opennebula.org/5.4/operation/references/template.html#context-section
    ipaddr_keys = [k for k in context if re.match(r'^ETH\d+_IP.*$', k)]
    if ipaddr_keys:
        onet = OpenNebulaNetwork(context, distro)
        results['network-interfaces'] = onet.gen_conf()

    return results
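# Hedged sketch: emulates only the USERDATA_ENCODING branch above with the
# stdlib, on a hypothetical context dict (OpenNebula's parse_shell_config
# and cloud-init's util.b64d are not used here).
import base64

context = {
    "USERDATA": base64.b64encode(b"#cloud-config\n").decode(),
    "USERDATA_ENCODING": "base64",
}
userdata = context["USERDATA"]
if context.get("USERDATA_ENCODING") == "base64":
    userdata = base64.b64decode(userdata).decode()
assert userdata.startswith("#cloud-config")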
def read_context_disk_dir(source_dir, asuser=None):
    """
    read_context_disk_dir(source_dir):
    read source_dir and return a tuple with metadata dict and user-data
    string populated. If not a valid dir, raise a NonContextDiskDir
    """
    found = {}
    for af in CONTEXT_DISK_FILES:
        fn = os.path.join(source_dir, af)
        if os.path.isfile(fn):
            found[af] = fn

    if not found:
        raise NonContextDiskDir("%s: %s" % (source_dir, "no files found"))

    context = {}
    results = {'userdata': None, 'metadata': {}}

    if "context.sh" in found:
        if asuser is not None:
            try:
                pwd.getpwnam(asuser)
            except KeyError:
                raise BrokenContextDiskDir("configured user '%s' "
                                           "does not exist" % asuser)
        try:
            path = os.path.join(source_dir, 'context.sh')
            content = util.load_file(path)
            context = parse_shell_config(content, asuser=asuser)
        except util.ProcessExecutionError as e:
            raise BrokenContextDiskDir("Error processing context.sh: %s"
                                       % (e))
        except IOError as e:
            raise NonContextDiskDir("Error reading context.sh: %s" % (e))
    else:
        raise NonContextDiskDir("Missing context.sh")

    if not context:
        return results

    results['metadata'] = context

    # process single or multiple SSH keys
    ssh_key_var = None
    if "SSH_KEY" in context:
        ssh_key_var = "SSH_KEY"
    elif "SSH_PUBLIC_KEY" in context:
        ssh_key_var = "SSH_PUBLIC_KEY"

    if ssh_key_var:
        lines = context.get(ssh_key_var).splitlines()
        results['metadata']['public-keys'] = [l for l in lines
                                              if len(l) and not
                                              l.startswith("#")]

    # custom hostname -- try hostname, or let cloud-init itself
    # create the hostname from an IP address later
    for k in ('HOSTNAME', 'PUBLIC_IP', 'IP_PUBLIC', 'ETH0_IP'):
        if k in context:
            results['metadata']['local-hostname'] = context[k]
            break

    # raw user data
    if "USER_DATA" in context:
        results['userdata'] = context["USER_DATA"]
    elif "USERDATA" in context:
        results['userdata'] = context["USERDATA"]

    # b64decode user data if necessary (default)
    if 'userdata' in results:
        encoding = context.get('USERDATA_ENCODING',
                               context.get('USER_DATA_ENCODING'))
        if encoding == "base64":
            try:
                results['userdata'] = util.b64d(results['userdata'])
            except TypeError:
                LOG.warn("Failed base64 decoding of userdata")

    # generate static /etc/network/interfaces
    # only if there are any required context variables
    # http://opennebula.org/documentation:rel3.8:cong#network_configuration
    ipaddr_keys = [k for k in context if re.match(r'^ETH\d+_IP$', k)]
    if ipaddr_keys:
        onet = OpenNebulaNetwork(context)
        results['network-interfaces'] = onet.gen_conf()

    return results