def __init__(self, file_in, file_out, debug=False):
    """
    Set up the RSA encryption paths and immediately encrypt file_in
    into file_out.

    :param file_in: the file to encrypt
    :param file_out: the file that receives the encrypted content
    :param debug: enable debug output
    """
    self.data = {
        'file': file_in,
        'secret': file_out,
        'pem': path_expand('~/.ssh/id_rsa.pub.pem'),
        # BUG FIX: the original passed ' ~/.ssh/id_rsa' with a leading
        # space, so path_expand could not expand "~" to the home directory.
        'key': path_expand('~/.ssh/id_rsa')
    }
    self.debug = debug
    self.encrypt()
def test_create_local_source(self):
    """Create a small nested directory tree of test files and time it."""
    HEADING()
    Benchmark.Start()
    base = "~/.cloudmesh/storage/test/"
    self.sourcedir = path_expand(base)
    # three files at increasing nesting depth: a/, a/b/, a/b/c/
    for relpath, text in [
        ("a/a.txt", "content of a"),
        ("a/b/b.txt", "content of b"),
        ("a/b/c/c.txt", "content of c"),
    ]:
        self.create_file(base + relpath, text)
    Benchmark.Stop()
    # test if the files are ok
    assert True
def test_create_source(self):
    """Create the nested source tree used by the storage tests."""
    HEADING()
    StopWatch.start("create source")
    base = "~/.cloudmesh/storage/test/"
    self.sourcedir = path_expand(base)
    # same layout as the local-source test: a/, a/b/, a/b/c/
    for relpath, text in [
        ("a/a.txt", "content of a"),
        ("a/b/b.txt", "content of b"),
        ("a/b/c/c.txt", "content of c"),
    ]:
        self.create_file(base + relpath, text)
    StopWatch.stop("create source")
    # test if the files are ok
    assert True
def cat(mask_secrets=True, attributes=None, path="~/.cloudmesh/cloudmesh.yaml", color=None):
    """
    Print the cloudmesh configuration file.

    :param mask_secrets: if True, secret values are masked in the output
    :param attributes: optional restriction of the output to given attributes
    :param path: the path of the yaml configuration file
    :param color: optional color used for the output
    :return: the result of Config.cat_lines
    """
    # BUG FIX: the original ignored the path, attributes, and color
    # parameters, always reading the default file and passing None through.
    _path = path_expand(path)
    with open(_path) as f:
        content = f.read().split("\n")
    return Config.cat_lines(content,
                            mask_secrets=mask_secrets,
                            attributes=attributes,
                            color=color)
def test_recursive_get(self):
    """Recursively download /a from the service into <sourcedir>/get."""
    # must be implemented by student into ~/.cloudmesh/storage/test/get
    # see self.content which contains all files but you must add get/
    home = self.sourcedir
    target = Path(path_expand(f"{home}/get"))
    target.mkdir(parents=True, exist_ok=True)
    StopWatch.start("GET Directory --recursive")
    dnld_files = self.p.get(self.p.service, "/a", f"{home}/get", True)
    StopWatch.stop("GET Directory --recursive")
    pprint(dnld_files)
    assert dnld_files is not None
def check(path="~/.cloudmesh/cloudmesh.yaml"):
    """
    Run sanity checks on the cloudmesh configuration file: the version
    against the installed distribution, TAB characters, and yamllint.

    :param path: the path of the yaml configuration file
    """
    error = False
    path = path_expand(path)
    banner("Check for Version")
    config = Config()
    dist_version = config.version()
    yaml_version = config["cloudmesh.version"]
    if dist_version == yaml_version:
        Console.ok(f"The version is {dist_version}")
    else:
        Console.error("Your version do not match")
        print()
        print("Found ~/.cloudmesh/cloudmesh.yaml:", yaml_version)
        print("Please update to version :", dist_version)
        print("")
        print("See also: ")
        print()
        print(
            " https://github.com/cloudmesh/cloudmesh-configuration/blob/master/cloudmesh/configuration/etc/cloudmesh.yaml"
        )
    banner("Check for TAB Characters")
    error = Config.check_for_tabs(path)
    if not error:
        Console.ok("No TABs found")
    banner("yamllint")
    try:
        # the import only verifies that yamllint is installed
        import yamllint  # noqa: F401
        options = \
            '-f colored ' \
            '-d "{extends: relaxed, ""rules: {line-length: {max: 256}}}"'
        r = Shell.live('yamllint {options} {path}'.format(**locals()))
        if 'error' in r or 'warning' in r:
            print(70 * '-')
            print(" line:column description")
            print()
        else:
            Console.ok("No issues found")
            print()
    except Exception:
        # BUG FIX: narrowed the original bare "except:" so that e.g.
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        Console.error("Could not execute yamllint. Please add with")
        Console.error("pip install yamllint")
def put(self, source=None, service=None, destination=None, recursive=False):
    """
    puts the source on the service

    :param service: the name of the service in the yaml file
    :param source: the source which either can be a directory or file
    :param destination: the destination which either can be a directory
                        or file
    :param recursive: in case of directory the recursive referes to all
                      subdirectories in the specified source
    :return: dict
    """
    # BUG FIX: the parameter was misspelled "recusrive"; renamed to match
    # the docstring and the signature of get(). Positional callers are
    # unaffected.
    # NOTE(review): the result of self.list() is unused; kept for
    # possible side effects -- confirm it can be removed.
    files = self.list()  # noqa: F841
    if recursive:
        raise NotImplementedError
    src = path_expand(source)
    dest = path_expand(destination)
    shutil.copy2(src, dest)
    return []
def fix_keys_file(filename):
    """
    Append the local ~/.ssh/id_rsa.pub to the given keys file, removing
    duplicate and empty lines, and write the result back in place.

    :param filename: the authorized-keys style file to fix
    """
    # concatenate ~/.ssh/id_rsa.pub
    lines = readfile(filename)
    key = readfile(path_expand("~/.ssh/id_rsa.pub"))
    keys = ''.join(lines + key)
    # BUG FIX: splitlines() never yields '\n', so the original filter
    # "x != '\n'" was a no-op and empty lines survived; filter empty
    # strings instead. dict.fromkeys removes duplicates while keeping
    # the original order (set() scrambled it).
    keys_list = [x for x in dict.fromkeys(keys.splitlines()) if x != '']
    keys = ('\n'.join(keys_list) + '\n')
    writefile(filename, str(keys))
def test_clean(self):
    """Remove the per-host ssh keys created by earlier tests (best effort)."""
    HEADING()
    hosts = Parameter.expand("red[01-03],tmp")
    for host in hosts:
        filename = path_expand(f"~/.ssh/id_rsa_{host}")
        # NOTE(review): the original f-strings contained corrupted
        # "(unknown)" placeholders; reconstructed to remove the private
        # key and its .pub counterpart -- confirm against version control.
        print(f"rm {filename} {filename}.pub ")
        try:
            os.remove(filename)
            os.remove(f"{filename}.pub")
        except Exception as e:  # noqa: F841
            # best effort: the files may not exist
            pass
def __init__(self, name=None, configuration="~/.cloudmesh/cloudmesh.yaml"):
    """
    Initialize the provider from the cloudmesh yaml configuration.

    :param name: the name of the cloud as defined in the yaml file
    :param configuration: the location of the yaml configuration file
    """
    configuration = path_expand(configuration)
    conf = Config(name, configuration)["cloudmesh"]
    super().__init__(name, configuration)
    self.user = conf["profile"]["user"]
    self.spec = conf["cloud"][name]
    self.cloud = name
    cred = self.spec["credentials"]
    # removed unused local "deft" (self.spec["default"] was never used)
    self.cloudtype = self.spec["cm"]["kind"]
    self.resource_group = cred["resourcegroup"]
    self.credentials = cred
def find_image_dirs(directory='dest'):
    """
    Return the set of directories below *directory* that contain images.

    :param directory: the root directory to search, default 'dest'
    :return: a set of directory path strings
    """
    root = Path(path_expand(directory))
    matches = []
    # collect images for every common extension spelling
    for ext in ('png', "PNG", "JPG", "jpg", "JPEG", "jpeg"):
        matches.extend(root.glob(f"**/*{ext}"))
    # reduce to the unique parent directories
    return {os.path.dirname(image) for image in matches}
def write(self, filename=None):
    """
    Write the cmdline config to the specified filename

    :param filename: the destination file written via sudo tee
    :raises Exception: if no filename is given or no script was fetched
    """
    if filename is None:
        raise Exception("write called with no filename")
    if self.script is None:
        raise Exception("no script found. Did you run .get() first?")
    # stage the script in a temp file, then copy it to the target via sudo
    tmp_location = path_expand('~/.cloudmesh/cmburn/firstrun.sh.tmp')
    writefile(tmp_location, self.script)
    # BUG FIX: the tee target was a corrupted "(unknown)" placeholder;
    # the validated-but-unused filename parameter is clearly the intended
    # target -- confirm against version control.
    Shell.run(f'cat {tmp_location} | sudo tee {filename}')
    Shell.execute('rm', arguments=[tmp_location])
def print_csv2np(filename: str) -> str:
    """
    reads a previously uploaded csv file into a numpy array and prints it

    :param filename: base filename
    :type filename: str
    :return: result
    :return type: str
    """
    csv_path = path_expand('~/.cloudmesh/upload-file/' + filename)
    array = np.genfromtxt(csv_path, delimiter=',')
    return str(array)
def get(self, source=None, destination=None, recursive=False):
    """
    gets the source and copies it in destination

    :param source: the source which either can be a directory or file
    :param destination: the destination which either can be a directory
                        or file
    :param recursive: in case of directory the recursive refers to all
                      subdirectories in the specified source
    :return: dict
    """
    # BUG FIX: the original unconditionally overwrote the destination
    # parameter with self._dirname(source); honor an explicitly given
    # destination and only fall back to the derived one.
    if destination is None:
        destination = self._dirname(source)
    src = path_expand(source)
    dest = path_expand(destination)
    if recursive:
        shutil.copytree(src, dest)
    else:
        shutil.copy2(src, dest)
    return self.list(source=destination, recursive=recursive)
def load(self, config_path=None): """ loads a configuration file :param config_path: :type config_path: :return: :rtype: """ # VERBOSE("Load config") self.config_path = Path(path_expand(config_path or self.location.config())).resolve() self.config_folder = dirname(self.config_path) self.create(config_path=config_path) with open(self.config_path, "r") as stream: content = stream.read() # content = path_expand(content) content = self.spec_replace(content) self.data = yaml.load(content, Loader=yaml.SafeLoader) # print (self.data["cloudmesh"].keys()) # self.data is loaded as nested OrderedDict, can not use set or get # methods directly if self.data is None: raise EnvironmentError( "Failed to load configuration file cloudmesh.yaml, " "please check the path and file locally") # # populate default variables # self.variable_database = Variables(filename="~/.cloudmesh/variable.dat") self.set_debug_defaults() default = self.default() for name in self.default(): if name not in self.variable_database: self.variable_database[name] = default[name] if "cloud" in default: self.cloud = default["cloud"] else: self.cloud = None
def __init__(self, name=None, configuration="~/.cloudmesh/cloudmesh.yaml"):
    """
    Initializes the provider. The default parameters are read from the
    configuration file that is defined in yaml format.

    :param name: The name of the provider as defined in the yaml file
    :param configuration: The location of the yaml configuration file
    """
    HEADING(c=".")
    conf = Config(configuration)["cloudmesh"]
    # self.user = conf["profile"]
    self.user = Config()["cloudmesh"]["profile"]["user"]
    self.spec = conf["cloud"][name]
    self.cloud = name
    cred = self.spec["credentials"]
    self.cloudtype = self.spec["cm"]["kind"]
    super().__init__(name, conf)
    # generate a unique vm name and advance the counter once
    self.name_generator = Name(
        experiment="exp",
        group="grp",
        user="******",
        kind="vm",
        counter=1)
    self.name = str(self.name_generator)
    self.name_generator.incr()
    self.name = str(self.name_generator)
    self.key_path = path_expand(Config()["cloudmesh"]["profile"]["publickey"])
    # BUG FIX: the original leaked the file handle (open without close);
    # use a context manager.
    with open(self.key_path, 'r') as f:
        self.key_val = f.read()
    self.testnode = None
    if self.cloudtype in Provider.ProviderMapper:
        self.driver = get_driver(
            Provider.ProviderMapper[self.cloudtype])
        if self.cloudtype == 'google':
            self.cloudman = self.driver(
                cred["client_email"],
                cred["path_to_json_file"],
                project=cred["project"]
            )
    else:
        print("Specified provider not available")
        self.cloudman = None
    self.default_image = None
    self.default_size = None
    self.public_key_path = conf["profile"]["publickey"]
def __init__(self, name=None):
    """
    Initializes the provider. The default parameters are read from the
    configuration file that is defined in yaml format.

    :param name: The name of the provider as defined in the yaml file
    :raises ValueError: if the public key location is not configured
    """
    self.config = Config()
    conf = self.config["cloudmesh"]
    super().__init__(name)
    self.user = self.config["cloudmesh.profile.user"]
    self.spec = conf["cloud"][name]
    self.cloud = name
    self.default = self.spec["default"]
    self.cloudtype = self.spec["cm"]["kind"]
    self.cred = self.config[f"cloudmesh.cloud.{name}.credentials"]
    # every credential field must be filled in for the Oracle SDK to work
    fields = ["user",
              "fingerprint",
              "key_file",
              "pass_phrase",
              "tenancy",
              "compartment_id",
              "region"]
    for field in fields:
        if self.cred[field] == 'TBD':
            Console.error(
                f"The credential for Oracle cloud is incomplete. {field} "
                "must not be TBD")
    self.credential = self._get_credentials(self.cred)
    self.compute = oci.core.ComputeClient(self.credential)
    self.virtual_network = oci.core.VirtualNetworkClient(self.credential)
    self.identity_client = oci.identity.IdentityClient(self.credential)
    self.compartment_id = self.credential["compartment_id"]
    try:
        self.public_key_path = conf["profile"]["publickey"]
        self.key_path = path_expand(
            Config()["cloudmesh"]["profile"]["publickey"])
        # BUG FIX: the original leaked the file handle; use a context
        # manager.
        with open(self.key_path, 'r') as f:
            self.key_val = f.read()
    except Exception:
        # BUG FIX: narrowed the bare "except:" so KeyboardInterrupt is
        # not converted into a ValueError.
        raise ValueError("the public key location is not set in the "
                         "profile of the yaml file.")
def detect_text_google() -> str:
    """
    Detects text in the file using Google Vision API.

    :return: p
    :return type: str
    """
    # build the Vision client from the stored service-account credentials
    credentials_path = path_expand('~/.cloudmesh/google.json')
    credentials = service_account.Credentials.from_service_account_file(
        credentials_path)
    client = vision.ImageAnnotatorClient(credentials=credentials)
    # read the sample image as raw bytes
    image_path = path_expand(
        '~/cm/cloudmesh-openapi/tests/image-analysis/sign_text.png')
    with io.open(image_path, 'rb') as image_file:
        content = image_file.read()
    response = client.text_detection(
        image=vision.types.Image(content=content))
    try:
        # the first annotation carries the full detected text
        texts = response.text_annotations
        p = {'Texts': (texts[0].description)}
    except Exception as e:
        p = {'Text': "error could not use image service"}
        print(e)
    if response.error.message:
        raise Exception('{}\nFor more info on error messages, check: '
                        'https://cloud.google.com/apis/design/errors'.format(
            response.error.message))
    return jsonify(p)
def get(self, arguments):
    """
    Resolve the generator settings (filename, yaml file, function name,
    server url, flags) from the parsed command line arguments and store
    them on the instance.

    :param arguments: the parsed docopt-style arguments
    """
    self.cwd = path_expand(os.path.curdir)
    filename = arguments['--filename']
    if filename is None:
        # NOTE(review): the message below contains a corrupted
        # "(unknown)" placeholder from extraction -- confirm the original
        # wording against version control.
        Console.error(f"--filename=(unknown)")
    self.filename = path_expand(filename)
    # BUG FIX: the original tested the raw, unexpanded filename; test the
    # expanded path that is actually used afterwards.
    if not os.path.isfile(self.filename):
        Console.error(f"--filename={self.filename} does not exist")
    self.module_directory = os.path.dirname(self.filename)
    self.module_name = os.path.basename(self.filename).split('.')[0]
    sys.path.append(self.module_directory)
    # derive the yaml file next to the python file unless given explicitly
    self.yamlfile = arguments.yamlfile or self.filename.rsplit(
        ".py")[0] + ".yaml"
    self.yamldirectory = os.path.dirname(self.yamlfile)
    self.function = arguments.FUNCTION or os.path.basename(
        self.filename).split('.')[0]
    self.serverurl = arguments.serverurl or "http://localhost:8080/cloudmesh"
    self.import_class = arguments.import_class or False
    self.all_functions = arguments.all_functions or False
    self.basic_auth = arguments.basic_auth
def cat_bibfiles(directory, output):
    """
    Concatenate every .bib file found below *directory* into *output*.

    :param directory: the root directory searched recursively for .bib files
    :param output: the file the concatenation is written to
    :return: the list of .bib paths that were concatenated
    """
    root = path_expand(directory)
    bibs = list(Path(root).glob("**/*.bib"))
    pprint(bibs)
    # each file is preceded by a bib comment line naming its origin
    chunks = []
    for bib in bibs:
        name = str(bib)
        chunks.append("\n\n% " + name + "\n\n" + readfile(name))
    writefile(output, "".join(chunks))
    return list(bibs)
def __init__(self, file=None, bucket=None, glue_job=None, glue_role=None, cmd_name=None):
    """
    Set up AWS Glue and S3 clients and remember the job parameters.

    :param file: optional script file; expanded with path_expand
    :param bucket: the S3 bucket name
    :param glue_job: the name of the glue job
    :param glue_role: the IAM role used by the glue job
    :param cmd_name: the command name for the job
    """
    # BUG FIX: ensure self.file always exists; the original only assigned
    # it when a file was given, leaving the attribute undefined otherwise.
    if file is not None:
        self.file = path_expand(file)
    else:
        self.file = None
    self.bucket = bucket
    self.glue_job = glue_job
    self.glue_role = glue_role
    self.cmd_name = cmd_name
    self.glue = boto3.client("glue")
    self.s3 = boto3.client("s3")
def _fetch_results_in_parallel(self, job_metadata, job_id, all_job_ids):
    """
    This method is used to fetch the results from remote nodes.

    :param job_metadata: the dictionary containing the information about
                         the previously submitted job
    :param job_id: the tuple containing destination node, destination pid
                   and destination node index when the job was submitted
    :param all_job_ids: shared list of outstanding job ids; the fetched
                        job id is removed from it on success
    :return:
    """
    dest_node_info = self.slurm_cluster
    path = path_expand(dest_node_info['credentials']['sshconfigpath'])
    dest_job_id = job_id
    # bind node name and ssh config once so the callers below only take
    # the command arguments
    ssh_caller = lambda *x: self._ssh(dest_node_info['name'], path, *x)
    scp_caller = lambda *x: self._scp(dest_node_info['name'], path, *x)
    #
    # use the qstat from cloudmesh, we have a whole library for that
    #
    ps_output = ssh_caller("qstat -u $USER | grep %s" % job_id)
    # empty output or a ' c ' state means the job finished; collect results
    if len(ps_output) == 0 or ' c ' in ps_output.lower():
        if not os.path.exists(job_metadata['local_path']):
            os.makedirs(job_metadata['local_path'])
        # TODO: REPLACE WITH .format
        # copy the whole remote results directory into the local path
        scp_caller(
            '-r',
            '%s:%s' % (dest_node_info['name'],
                       job_metadata['remote_path']),
            os.path.join(job_metadata['local_path'], ''))
        # the scripts themselves are not results; remove the copies
        os.remove(
            os.path.join(
                job_metadata['local_path'],
                os.path.basename(
                    os.path.normpath(job_metadata['remote_path'])),
                job_metadata['script_name']))
        os.remove(
            os.path.join(
                job_metadata['local_path'],
                os.path.basename(
                    os.path.normpath(job_metadata['remote_path'])),
                job_metadata['slurm_script_name']))
        # the argument file only exists for params+file submissions
        if job_metadata['input_type'] == 'params+file':
            os.remove(
                os.path.join(
                    job_metadata['local_path'],
                    os.path.basename(
                        os.path.normpath(job_metadata['remote_path'])),
                    job_metadata['argfile_name']))
        all_job_ids.remove(dest_job_id)
        # TODO: REPLACE WITH .format
        print("Results collected from %s for jobID %s" %
              (dest_node_info['name'], dest_job_id))
def __init__(self, dryrun=False, force=False):
    """
    Initialization of the Mongo installer.

    :param dryrun: if True only print the resolved locations
    :param force: force the installation
    """
    self.dryrun = dryrun
    self.force = force
    self.config = Config()
    self.data = self.config.data["cloudmesh"]["data"]["mongo"]
    self.machine = platform.lower()
    download = self.config[
        f"cloudmesh.data.mongo.MONGO_DOWNLOAD.{self.machine}"]
    # expand every configured location once up front
    self.mongo_code = path_expand(download["url"])
    self.mongo_path = path_expand(download["MONGO_PATH"])
    self.mongo_log = path_expand(download["MONGO_LOG"])
    self.mongo_home = path_expand(download["MONGO_HOME"])
    if self.dryrun:
        for location in (self.mongo_path,
                         self.mongo_log,
                         self.mongo_home,
                         self.mongo_code):
            print(location)
def delete(self, source=None, recursive=False):
    """
    deletes the source

    :param source: the source which either can be a directory or file
    :param recursive: in case of directory the recursive refers to all
                      subdirectories in the specified source
    :return: dict
    :raises NotImplementedError: always; the code after the raise is an
                                 unreachable draft implementation
    """
    raise NotImplementedError
    # unreachable draft kept for reference; fixed the misspelled keyword
    # "ststus" -> "status" so the draft is usable once the raise is removed
    source = self._dirname(source)
    entries = self._list(source=source,
                         recursive=recursive,
                         status="deleted")
    shutil.rmtree(path_expand(source))
    return entries
def create_metadata(metadata, location):
    """
    Write the epub metadata template to *location* unless it already exists.

    :param metadata: dict with at least "author" and "title" entries
    :param location: the target path of the metadata file
    """
    target = path_expand(location)
    Path(os.path.dirname(target)).mkdir(parents=True, exist_ok=True)
    # never overwrite an existing metadata file
    if os.path.isfile(target):
        return
    template = pkg_resources.resource_filename(
        "bookmanager", 'template/epub/metadata.txt')
    meta = copy.deepcopy(metadata)
    # newlines in author/title would break the template layout
    for field in ["author", "title"]:
        meta[field] = meta[field].replace("\n", " ")
    writefile(target, readfile(template).format(**meta))
def file_upload(self) -> str:
    """
    This function will upload file into .cloudmesh/upload-file location
    This will first get upload file object from request.files function
    and then override this object in given location.

    :return: the name of the uploaded file
    """
    file = connexion.request.files.get("upload")
    filename = file.filename
    if file:
        file_path = f"~/.cloudmesh/upload-file"
        p = Path(path_expand(file_path))
        p.mkdir(parents=True, exist_ok=True)
        # BUG FIX: the save target contained a corrupted "(unknown)"
        # placeholder; save under the uploaded file's own name.
        file.save(f'{p.absolute()}/{filename}')
    return filename
def __init__(self, config='./toc.yaml'):
    """
    Initialize the Config class.

    :param config: A local file path to cloudmesh yaml config with
                   a root element `cloudmesh`. Default: `./toc.yaml`
                   (the docstring previously said `.toc.yaml`, which did
                   not match the actual default)

    Example:

        ---
        - BOOK:
          - "{b516}/preface.md"
          - INTRODUCTION:
            - "{manager}/other.md"
          - CLOUD:
            - "{manager}/other.md"
            - AWS:
              - "{manager}/other.md"
              - "i {manager}"
              - "r {manager}"
              - "p {manager}/test.py"
          - DEVOPS:
            - "{manager}/other.md"
            - ANSIBLE:
              - "{manager}/other.md"
              - "i {manager}"
              - "r {manager}"
              - "p {manager}/test.py"
    """
    # Borg pattern: all instances share the same state
    self.__dict__ = self.__shared_state
    if "data" not in self.__dict__:
        # VERBOSE("Load config")
        self.config_path = Path(path_expand(config)).resolve()
        self.config_folder = dirname(self.config_path)
        with open(self.config_path, "r") as content:
            self.data = yaml.load(content, Loader=yaml.SafeLoader)
        self.variables = dict(self.data)
        # removed unused local "keys" (self.variables.keys() was unused)
        del self.variables['BOOK']
        self.book = list(self.data['BOOK'])
        self.flat = munchify(self.variables)
        self.book = self.spec_replace(self.book, self.variables)
        self.variables = dict_flatten(self.variables, sep=".")
def gather_keys(
        username=None,
        hosts=None,
        filename="~/.ssh/id_rsa.pub",
        key="~/.ssh/id_rsa",
        processors=3,
        dryrun=False):
    """
    returns in a list the keys of the specified hosts

    :param username: the username used for the remote logins
    :param hosts: a parameterized host specification
    :param filename: the local public key file included first
    :param key: unused; kept for interface compatibility
    :param processors: unused; kept for interface compatibility
    :param dryrun: unused; kept for interface compatibility
    :return: the concatenated keys as a newline-terminated string
    """
    names = Parameter.expand(hosts)
    results_key = Host.ssh(hosts=names,
                           command='cat .ssh/id_rsa.pub',
                           username=username,
                           verbose=False)
    filename = path_expand(filename)
    # synthesize a Host.ssh-shaped record for the local key
    localkey = {
        'host': "localhost",
        'command': [''],
        'execute': "",
        'stdout': readfile(filename).strip(),
        'stderr': None,
        'returncode': True,
        'success': True,
        'date': DateTime.now()
    }
    if results_key is None:
        return ""
    # local key first, then the remote keys with duplicates removed
    remote = set(entry["stdout"] for entry in results_key)
    output = [localkey['stdout']] + list(remote)
    return '\n'.join(output) + "\n"
def add(self, name=None, source=None, group=None):
    """
    key add [NAME] [--source=FILENAME]
    key add [NAME] [--source=git]
    key add [NAME] [--source=ssh]
    """
    # source "git": fetch all keys from the user's github account
    if source == "git":
        config = Config()
        username = config["cloudmesh.profile.github"]
        keys = SSHkey().get_from_git(username)
        for key in keys:
            key['group'] = group or ["git"]
    # source "ssh": use the local ~/.ssh key
    elif source == "ssh":
        key = SSHkey(name=name)
        key['group'] = list(set([group] + ["local", "ssh"]))
        keys = [key]
    else:  # source is filename
        def get_group_name(x):
            # strip a trailing file extension to derive a group name
            # NOTE(review): returns a one-element list when x contains
            # a ".", otherwise x unchanged (string or None) -- confirm
            # this mixed return type is intended
            if x and "." in x:
                x = [x.rsplit(".", 1)[0]]
            return x
        if not group:
            group = ["local"]
        # the key file's basename (minus extension) becomes a group
        file_group = os.path.basename(source)
        file_group = get_group_name(file_group)
        group = group + file_group
        filename = path_expand(source)
        lines = readfile(filename).splitlines()
        keys = []
        group = [get_group_name(x) for x in group]
        for line in lines:
            key = SSHkey()
            # the key type prefix (text before the first "-") joins
            # the groups for this key
            key_group = group + [line.split("-", 1)[0]]
            key.add(key=line, group=key_group, filename=source)
            # the comment field (third token of the key line) becomes
            # the key name
            key["cm"]["name"] = key["name"] = line.split(' ', 2)[2]
            keys.append(key)
    return keys
def get_from_dir(cls, directory=None, store=True):
    """
    Collect all public keys (*.pub files) found in the given directory.

    :param directory: the directory to scan; defaults to ~/.ssh
    :param store: unused here; kept for interface compatibility
    :return: a list of the public key strings found
    """
    directory = directory or path_expand("~/.ssh")
    # find way that also works on windows, code always must work on windows
    # and Linux, if not you need to have if condition
    # BUG FIX: the original ran 'chmod 700 $HOME /.ssh' (note the space),
    # which chmods the home directory and /.ssh instead of ~/.ssh. Also
    # hoisted the identical duplicate call out of the per-file loop.
    os.system("chmod 700 $HOME/.ssh")
    files = [file for file in os.listdir(expanduser(path_expand(directory)))
             if file.lower().endswith(".pub")]
    d = []
    for file in files:
        print(file)
        path = directory + "/" + file
        with open(path) as fd:
            for pubkey in map(str.strip, fd):
                # skip empty lines
                if not pubkey:
                    continue
                print(pubkey)
                d.append(pubkey)
    return d
def setup(self):
    """Prepare the vdir fixture: create a test file and upload it."""
    StopWatch.start("vdir setup")
    self.vdir = Vdir()
    self.endpoint = 'box:/test.txt'
    self.directory_and_name = '/testdir/test'
    self.directory = 'testdir'
    self.file = 'test'
    self.create_file('~/.cloudmesh/vdir/test/test.txt', 'test file')
    self.destination = path_expand("~/.cloudmesh/vdir/test")
    # the storage service is taken from the first entry of the
    # "storage" variable
    variables = Variables()
    services = Parameter.expand(variables['storage'])
    self.p = Provider(service=services[0])
    self.p.put(source='~/.cloudmesh/vdir/test/test.txt',
               destination='/',
               recursive=False)
    StopWatch.stop("vdir setup")
def pem_create(self):
    """Derive the public pem file from the private key via openssl."""
    template = "openssl rsa -in {key} -pubout > {pem}"
    command = path_expand(template.format(**self.data))
    self._execute(command)
def encrypt(self):
    """Encrypt self.data['file'] into self.data['secret'] via openssl."""
    # encrypt the file into secret.txt
    print(self.data)
    template = "openssl rsautl -encrypt -pubin -inkey {pem} -in {file} -out {secret}"
    command = path_expand(template.format(**self.data))
    self._execute(command)
def decrypt(self, filename=None):
    """
    Decrypt the stored secret via openssl.

    :param filename: if given, replaces self.data['secret'] before
                     decrypting
    """
    if filename is not None:
        self.data['secret'] = filename
    template = "openssl rsautl -decrypt -inkey {key} -in {secret}"
    command = path_expand(template.format(**self.data))
    self._execute(command)
def pem_cat(self):
    """Print the pem file via cat."""
    # BUG FIX: the parameter was misspelled "slef" while the body used
    # "self", which raised NameError on every call.
    command = path_expand("cat {pem}".format(**self.data))
    self._execute(command)