def _store_cluster_cert(cert, no_check):
    """Store cluster certificate bundle downloaded from cluster and store
    settings in core.ssl_verify

    :param cert: ca cert from cluster
    :type cert: str
    :param no_check: whether to verify downloaded cert
    :type no_check: bool
    :returns: whether or not we are storing the downloaded cert bundle
    :rtype: bool
    """

    # Refuse to store a cert the user did not confirm, but keep SSL
    # validation enabled via the default trust store.
    if not (no_check or _user_cert_validation(cert)):
        # we don't have a cert, but we still want to validate SSL
        config.set_val("core.ssl_verify", "true")
        return False

    with util.temptext() as (_, staging_path):
        with open(staging_path, 'w') as handle:
            handle.write(cert)
        cert_path = os.path.join(
            config.get_attached_cluster_path(), "dcos_ca.crt")
        util.sh_copy(staging_path, cert_path)

    # point SSL verification at the stored bundle
    config.set_val("core.ssl_verify", cert_path)
    return True
def _store_cluster_cert(cert, no_check):
    """Store cluster certificate bundle downloaded from cluster and store
    settings in core.ssl_verify

    :param cert: ca cert from cluster
    :type cert: str
    :param no_check: whether to verify downloaded cert
    :type no_check: bool
    :returns: whether or not we are storing the downloaded cert bundle
    :rtype: bool
    :raises DCOSException: if the cert fingerprint is not confirmed
    """

    needs_confirmation = not no_check
    if needs_confirmation and not _user_cert_validation(cert):
        raise DCOSException("Couldn't get confirmation for the fingerprint.")

    with util.temptext() as (_, scratch_path):
        with open(scratch_path, 'w') as out:
            out.write(cert)
        cert_path = os.path.join(
            config.get_attached_cluster_path(), "dcos_ca.crt")
        util.sh_copy(scratch_path, cert_path)

    # point SSL verification at the stored bundle
    config.set_val("core.ssl_verify", cert_path)
    return True
def setup_cluster_config(dcos_url, temp_path, stored_cert):
    """
    Create a cluster directory for cluster specified in "temp_path"
    directory.

    :param dcos_url: url to DC/OS cluster
    :type dcos_url: str
    :param temp_path: path to temporary config dir
    :type temp_path: str
    :param stored_cert: whether we stored cert bundle in 'setup' dir
    :type stored_cert: bool
    :returns: path to cluster specific directory
    :rtype: str
    :raises DCOSException: if the cluster id cannot be determined or the
        cluster is already set up
    """

    try:
        # find cluster id
        cluster_url = dcos_url.rstrip('/') + '/metadata'
        res = http.get(cluster_url, timeout=1)
        cluster_id = res.json().get("CLUSTER_ID")
    except DCOSException as e:
        msg = ("Error trying to find cluster id: {}\n "
               "Please make sure the provided DC/OS URL is valid: {}".format(
                   e, dcos_url))
        raise DCOSException(msg)

    # Guard against a metadata payload without a CLUSTER_ID entry:
    # os.path.join would otherwise fail with a TypeError on None.
    if not cluster_id:
        raise DCOSException(
            "Error trying to find cluster id. "
            "Please make sure the provided DC/OS URL is valid: {}".format(
                dcos_url))

    # create cluster id dir
    cluster_path = os.path.join(config.get_config_dir_path(),
                                constants.DCOS_CLUSTERS_SUBDIR,
                                cluster_id)
    if os.path.exists(cluster_path):
        raise DCOSException("Cluster [{}] is already setup".format(dcos_url))

    util.ensure_dir_exists(cluster_path)

    # move contents of setup dir to new location
    for (path, dirnames, filenames) in os.walk(temp_path):
        for f in filenames:
            util.sh_copy(os.path.join(path, f), cluster_path)

    cluster = Cluster(cluster_id)
    config_path = cluster.get_config_path()
    if stored_cert:
        cert_path = os.path.join(cluster_path, "dcos_ca.crt")
        config.set_val("core.ssl_verify", cert_path, config_path=config_path)

    # best effort: resolve a human-readable cluster name, falling back
    # to the cluster id when the query fails or returns nothing
    cluster_name = cluster_id
    try:
        url = dcos_url.rstrip('/') + '/mesos/state-summary'
        name_query = http.get(url, toml_config=cluster.get_config())
        name = name_query.json().get("cluster")
        if name:
            cluster_name = name
    except DCOSException:
        pass

    config.set_val("cluster.name", cluster_name, config_path=config_path)

    return cluster_path
def _copy_config_to_dir(name, dst_dir):
    """Copy the named config fixture into ``dst_dir`` as ``dcos.toml``.

    :param name: name of the config fixture to copy.
    :type name: str
    :param dst_dir: directory that receives the copied config
    :type dst_dir: str
    """

    fixture_config = os.path.join(
        os.path.dirname(__file__),
        '../data/cluster_migration/{}'.format(name))

    # make sure the config has the proper permission
    os.chmod(fixture_config, 0o600)

    util.sh_copy(fixture_config, os.path.join(dst_dir, 'dcos.toml'))
def move_to_cluster_config():
    """Create a cluster specific config file + directory
    from a global config file. This will move users from global config
    structure (~/.dcos/dcos.toml) to the cluster specific one
    (~/.dcos/clusters/CLUSTER_ID/dcos.toml) and set that cluster as
    the "attached" cluster.

    :rtype: None
    """

    global_config = config.get_global_config()
    dcos_url = config.get_config_val("core.dcos_url", global_config)

    # if no cluster is set, do not move the cluster yet
    if dcos_url is None:
        return

    metadata_url = dcos_url.rstrip('/') + '/metadata'
    try:
        # find cluster id
        response = http.get(metadata_url)
        cluster_id = response.json().get("CLUSTER_ID")
    except DCOSException as e:
        # don't move cluster if dcos_url is not valid
        logger.error("Error trying to find cluster id: {}".format(e))
        return

    # create cluster id dir
    cluster_path = os.path.join(config.get_config_dir_path(),
                                constants.DCOS_CLUSTERS_SUBDIR,
                                cluster_id)
    util.ensure_dir_exists(cluster_path)

    # move config file to new location
    util.sh_copy(config.get_global_config_path(), cluster_path)

    # set cluster as attached
    attached_marker = os.path.join(
        cluster_path, constants.DCOS_CLUSTER_ATTACHED_FILE)
    util.ensure_file_exists(attached_marker)
def _bundle(package_directory, output_directory):
    """Bundle the package in ``package_directory`` into a DC/OS Universe
    zip file named after the package's name, version and content digest.

    :param package_directory: directory containing the package
    :type package_directory: str
    :param output_directory: directory where to save the package zip file;
        defaults to the current working directory when None
    :type output_directory: str
    :returns: process status
    :rtype: int
    :raises DCOSException: on a missing package.json, an unexpected file in
        the package directory, or a pre-existing output file
    """

    if output_directory is None:
        output_directory = os.getcwd()
    # fix: log message previously said 'ouput'
    logger.debug('Using [%s] as the output directory', output_directory)

    # Find package.json file and parse it
    if not os.path.exists(os.path.join(package_directory, 'package.json')):
        raise DCOSException(
            ('The file package.json is required in the package directory '
             '[{}]').format(package_directory))

    package_json = _validate_json_file(
        os.path.join(package_directory, 'package.json'))

    with tempfile.NamedTemporaryFile() as temp_file:
        with zipfile.ZipFile(
                temp_file.name,
                mode='w',
                compression=zipfile.ZIP_DEFLATED,
                allowZip64=True) as zip_file:

            # list through package directory and add files to zip archive
            for filename in sorted(os.listdir(package_directory)):
                fullpath = os.path.join(package_directory, filename)
                if filename == 'marathon.json.mustache':
                    zip_file.write(fullpath, arcname=filename)
                elif filename in ['config.json', 'command.json',
                                  'package.json']:
                    # schema check the config and command json file
                    _validate_json_file(fullpath)
                    zip_file.write(fullpath, arcname=filename)
                elif filename == 'assets' and os.path.isdir(fullpath):
                    _bundle_assets(fullpath, zip_file)
                elif filename == 'images' and os.path.isdir(fullpath):
                    _bundle_images(fullpath, zip_file)
                else:
                    # anything else is an error
                    raise DCOSException(
                        ('Error bundling package. Extra file in package '
                         'directory [{}]').format(fullpath))

        # Compute the name of the package file
        zip_file_name = os.path.join(
            output_directory,
            '{}-{}-{}.zip'.format(
                package_json['name'],
                package_json['version'],
                _hashfile(temp_file.name)))

        if os.path.exists(zip_file_name):
            raise DCOSException(
                'Output file [{}] already exists'.format(zip_file_name))

        # rename with digest
        util.sh_copy(temp_file.name, zip_file_name)

    # Print the full path to the file
    emitter.publish(
        errors.DefaultError(
            'Created DCOS Universe package [{}].'.format(zip_file_name)))

    return 0
def _bundle(package_directory, output_directory):
    """Build a DC/OS Universe package zip from ``package_directory`` and
    place it in ``output_directory`` (current directory when None).

    :param package_directory: directory containing the package
    :type package_directory: str
    :param output_directory: directory where to save the package zip file
    :type output_directory: str
    :returns: process status
    :rtype: int
    """

    if output_directory is None:
        output_directory = os.getcwd()
    logger.debug('Using [%s] as the ouput directory', output_directory)

    # Find package.json file and parse it
    manifest_path = os.path.join(package_directory, 'package.json')
    if not os.path.exists(manifest_path):
        raise DCOSException(
            ('The file package.json is required in the package directory '
             '[{}]').format(package_directory))
    package_json = _validate_json_file(manifest_path)

    schema_checked = ('config.json', 'command.json', 'package.json')

    with tempfile.NamedTemporaryFile() as temp_file:
        with zipfile.ZipFile(temp_file.name,
                             mode='w',
                             compression=zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zip_file:
            # walk the package directory and archive each known entry
            for entry in sorted(os.listdir(package_directory)):
                entry_path = os.path.join(package_directory, entry)
                if entry == 'marathon.json.mustache':
                    zip_file.write(entry_path, arcname=entry)
                elif entry in schema_checked:
                    # schema check the config and command json file
                    _validate_json_file(entry_path)
                    zip_file.write(entry_path, arcname=entry)
                elif entry == 'assets' and os.path.isdir(entry_path):
                    _bundle_assets(entry_path, zip_file)
                elif entry == 'images' and os.path.isdir(entry_path):
                    _bundle_images(entry_path, zip_file)
                else:
                    # anything else is an error
                    raise DCOSException(
                        ('Error bundling package. Extra file in package '
                         'directory [{}]').format(entry_path))

        # Compute the name of the package file
        zip_file_name = os.path.join(
            output_directory,
            '{}-{}-{}.zip'.format(package_json['name'],
                                  package_json['version'],
                                  _hashfile(temp_file.name)))

        if os.path.exists(zip_file_name):
            raise DCOSException(
                'Output file [{}] already exists'.format(zip_file_name))

        # rename with digest
        util.sh_copy(temp_file.name, zip_file_name)

    # Print the full path to the file
    emitter.publish(
        errors.DefaultError(
            'Created DCOS Universe package [{}].'.format(zip_file_name)))

    return 0