def update_dict(self, elements, kind=None):
    """
    Adds a cloudmesh "cm" dict to every entry of a dict of dicts and
    returns the modified entries as a list.

    :param elements: a dict mapping names to entry dicts. If None,
                     None is returned.
    :param kind: for some kinds special attributes are added.
                 This includes vm and image.
    :return: the list with the modified dicts
    """
    if elements is None:
        return None

    d = []
    for key, entry in elements.items():
        entry['name'] = key
        if "cm" not in entry:
            entry['cm'] = {}
        entry["cm"].update({
            "kind": kind,
            "driver": self.cloudtype,
            "cloud": self.cloud,
            "name": key
        })
        if kind == 'vm':
            entry["cm"]["updated"] = str(DateTime.now())
        elif kind == 'image':
            # NOTE(review): this sets the top-level "updated" attribute,
            # not entry["cm"]["updated"] -- confirm this is intended.
            entry["cm"]["created"] = entry["updated"] = str(
                DateTime.now())
        d.append(entry)
    return d
def delete(self, path, recursive=True):
    """
    adds a delete action to the queue

    :param path: the path to remove
    :param recursive: remove directories recursively
    :return: the parsed specification entry
    """
    identifier = str(uuid.uuid1())
    created = DateTime.now()
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{path}"
          kind: storage
          id: {identifier}
          cloud: {self.name}
          collection: {self.collection}
          created: {created}
        action: delete
        source:
          path: {path}
          recursive: {recursive}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    # todo: this increment wouldn't work because at each call, there
    #       will be a new instance of the class
    self.number += 1
    return entries
def list(self, path, dir_only=False, recursive=False):
    """
    adds a list action to the queue

    list the directory in the storage service

    :param path: the directory to list
    :param dir_only: list directories only
    :param recursive: descend into subdirectories
    :return: the parsed specification entry
    """
    date = DateTime.now()
    uuid_str = str(uuid.uuid1())
    # BUG FIX: "dir_only:{dir_only}" and "recursive:{recursive}" were
    # missing the space after the colon that YAML block mappings
    # require, so the specification did not parse as key/value pairs.
    specification = self.create_spec(
        f"""
        action: list
        path: {path}
        dir_only: {dir_only}
        recursive: {recursive}
        status: waiting
        """, **locals())
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number = self.number + 1
    return entries
def copy(self, sourcefile, destinationfile, recursive=False):
    """
    adds a copy action to the queue

    copies the file from the source service to the destination service
    using the file located in the path and storing it into the remote.
    If remote is not specified path is used for it. The copy will not
    be performed if the files are the same.

    :param sourcefile: path of the file to copy from
    :param destinationfile: path to copy the file to
    :param recursive: copy directories recursively
    :return: the parsed specification entry
    """
    # create_spec consumes the locals (date, uuid_str, sourcefile, ...)
    # via **locals(), so the variable names below must not change.
    uuid_str = str(uuid.uuid1())
    date = DateTime.now()
    specification = self.create_spec(
        f"""
        action: copy
        source:
          path: {sourcefile}
        destination:
          path: {destinationfile}
        recursive: {recursive}
        status: waiting
        """, **locals())
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def delete(self, path, recursive=True):
    """
    adds a delete action to the queue

    :param path: the path to delete
    :param recursive: delete directories recursively
    :return: the parsed specification entry
    """
    entry_id = str(uuid.uuid1())
    now = DateTime.now()
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{path}"
          kind: storage
          id: {entry_id}
          cloud: {self.name}
          collection: {self.collection}
          created: {now}
        action: delete
        source:
          path: {path}
          recursive: {recursive}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def mkdir(self, path):
    """
    adds a mkdir action to the queue

    create the directory in the storage service

    :param path: the directory path to create
    :return: the parsed specification entry
    """
    date = DateTime.now()
    uuid_str = str(uuid.uuid1())
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          kind: storage
          id: {uuid_str}
          cloud: {self.name}
          name: {path}
          collection: {self.collection}
          created: {date}
        action: mkdir
        path: {path}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number = self.number + 1
    return entries
def list(self, path, dir_only=False, recursive=False):
    """
    adds a list action to the queue

    list the directory in the storage service

    :param path: the directory to list
    :param dir_only: list directories only
    :param recursive: descend into subdirectories
    :return: the parsed specification entry
    """
    date = DateTime.now()
    uuid_str = str(uuid.uuid1())
    # BUG FIX: "dir_only:{dir_only}" and "recursive:{recursive}" lacked
    # the space after the colon that YAML block mappings require, so
    # the specification did not parse as key/value pairs.
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          kind: storage
          id: {uuid_str}
          cloud: {self.name}
          name: {path}
          collection: {self.collection}
          created: {date}
        action: list
        path: {path}
        dir_only: {dir_only}
        recursive: {recursive}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    # todo: this increment wouldn't work because at each call, there
    #       will be a new instance of the class
    self.number = self.number + 1
    return entries
def cancel(self, id=None):
    """
    cancels a job with a specific id

    :param id: the job id; if None all jobs are canceled
    :return: the parsed specification entry
    """
    identifier = str(uuid.uuid1())
    created = DateTime.now()
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{id}"
          kind: storage
          id: {identifier}
          cloud: {self.name}
          collection: {self.collection}
          created: {created}
        action: cancel
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    # todo: this increment wouldn't work because at each call, there
    #       will be a new instance of the class
    self.number += 1
    return entries
def _run(args):
    """
    An internal command that executes as part of a process map a given
    command.

    args is a dict and may include

    * host: the host to run on; if it equals the local hostname the
      "execute" entry is run locally through the shell
    * command: the command list/string for remote execution
    * execute: the command string for local execution
    * shell: passed through to subprocess.run for the remote branch

    It returns a dict of the form

    * command
    * stdout & stderr
    * returncode
    * success

    :param args: command dict
    :return: result dict
    """
    hostname = os.uname()[1]
    host = args.get("host")
    shell = args.get("shell")

    if host == hostname:
        # Local execution: run through the shell and capture the
        # combined output; getoutput never raises on failure.
        command = args.get("execute")
        result = subprocess.getoutput(command)
        stderr = ""
        returncode = 0
        stdout = result
    else:
        command = args.get("command")
        result = subprocess.run(command, capture_output=True, shell=shell)
        result.stdout = result.stdout.decode("utf-8", "ignore").strip()
        # NOTE: stderr is left as bytes when non-empty -- callers
        # appear to rely on the current types, so only empty output is
        # normalized to None.
        if result.stderr == b'':
            result.stderr = None
        stderr = result.stderr
        returncode = result.returncode
        stdout = result.stdout

    # BUG FIX: "command" may be absent/None on the local branch (which
    # uses "execute"); joining None raised a TypeError.
    data = {
        'host': args.get("host"),
        'command': args.get("command"),
        'execute': args.get("execute"),
        'stdout': stdout,
        'stderr': stderr,
        'returncode': returncode,
        'success': returncode == 0,
        'date': DateTime.now(),
        'cmd': " ".join(args.get("command") or [])
    }
    return data
def list(self, path, dir_only=False, recursive=False):
    """
    adds a list action to the queue

    list the directory in the storage service

    :param path: the directory to list
    :param dir_only: list directories only
    :param recursive: descend into subdirectories
    :return: the parsed specification entry
    """
    date = DateTime.now()
    uuid_str = str(uuid.uuid1())
    # BUG FIX: "dir_only:{dir_only}" and "recursive:{recursive}" lacked
    # the space after the colon that YAML block mappings require, so
    # the specification did not parse as key/value pairs.
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          kind: storage
          id: {uuid_str}
          cloud: {self.name}
          name: {path}
          collection: {self.collection}
          created: {date}
        action: list
        path: {path}
        dir_only: {dir_only}
        recursive: {recursive}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number = self.number + 1
    return entries
def copy(self, sourcefile, destinationfile, recursive=False):
    """
    adds a copy action to the queue

    copies the file from the source service to the destination service
    using the file located in the path and storing it into the remote.
    If remote is not specified path is used for it. The copy will not
    be performed if the files are the same.

    :param sourcefile: path of the file to copy from
    :param destinationfile: path to copy the file to
    :param recursive: copy directories recursively
    :return: the parsed specification entry
    """
    date = DateTime.now()
    uuid_str = str(uuid.uuid1())
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{sourcefile}:{destinationfile}"
          kind: storage
          id: {uuid_str}
          cloud: {self.name}
          collection: {self.collection}
          created: {date}
        action: copy
        source:
          path: {sourcefile}
        destination:
          path: {destinationfile}
        recursive: {recursive}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    # todo: this increment wouldn't work because at each call, there
    #       will be a new instance of the class
    self.number = self.number + 1
    return entries
def _get(item, key):
    # Fetch d[item][key] as a display string; renders missing values
    # as show_none and humanizes columns listed in `humanize`.
    # NOTE(review): relies on d, show_none, humanize, parser, start and
    # DateTime from the enclosing scope.
    try:
        tmp = str(d[item][key])
        if tmp == "None":
            tmp = show_none
        elif humanize and key in humanize:
            tmp = parser.parse(tmp)
            tmp = DateTime.humanize(start - tmp)
    # BUG FIX: was a bare "except:", which also swallowed SystemExit
    # and KeyboardInterrupt.
    except Exception:
        # any lookup/parse failure renders as a blank cell
        tmp = ' '
    return tmp
def gather_keys(
        username=None,
        hosts=None,
        filename="~/.ssh/id_rsa.pub",
        key="~/.ssh/id_rsa",
        processors=3,
        dryrun=False):
    """
    returns in a list the keys of the specified hosts

    :param username: the user name on the remote hosts
    :param hosts: parameterized host names (expanded via Parameter.expand)
    :param filename: path of the local public key to include
    :param key: unused, kept for backward compatibility
    :param processors: unused, kept for backward compatibility
    :param dryrun: unused, kept for backward compatibility
    :return: the public keys joined by newlines as a single string
    """
    names = Parameter.expand(hosts)

    results_key = Host.ssh(hosts=names,
                           command='cat .ssh/id_rsa.pub',
                           username=username,
                           verbose=False)

    filename = path_expand(filename)
    localkey = {
        'host': "localhost",
        'command': [''],
        'execute': "",
        'stdout': readfile(filename).strip(),
        'stderr': None,
        'returncode': True,
        'success': True,
        'date': DateTime.now()
    }

    if results_key is None:
        return ""

    # getting the output and also removing duplicates
    output = [localkey['stdout']] + \
             list(set([element["stdout"] for element in results_key]))
    output = '\n'.join(output) + "\n"
    return output
def add_cm(self, cm_name):
    """
    Build the cm header block for a queue specification.

    :param cm_name: value for the cm name attribute
    :return: the dedented cm YAML header, terminated by a newline
    """
    identifier = str(uuid.uuid1())
    created = DateTime.now()
    header = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{cm_name}"
          kind: storage
          id: {identifier}
          cloud: {self.name}
          collection: {self.collection}
          created: {created}
        """).strip() + "\n"
    return header
def cancel(self, id=None):
    """
    cancels a job with a specific id

    :param id: the job id; if None all jobs are canceled
    :return: the parsed specification entry
    """
    # create_spec consumes the locals (date, uuid_str, id, ...) via
    # **locals(), so the variable names below must not change.
    uuid_str = str(uuid.uuid1())
    date = DateTime.now()
    specification = self.create_spec(
        f"""
        action: cancel
        status: waiting
        """, **locals())
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def mkdir(self, path):
    """
    adds a mkdir action to the queue

    create the directory in the storage service

    :param path: the directory path to create
    :return: the parsed specification entry
    """
    # create_spec consumes the locals (date, uuid_str, path, ...) via
    # **locals(), so the variable names below must not change.
    uuid_str = str(uuid.uuid1())
    date = DateTime.now()
    specification = self.create_spec(
        f"""
        action: mkdir
        path: {path}
        status: waiting
        """, **locals())
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def delete(self, path, recursive=True):
    """
    adds a delete action to the queue

    :param path: the path to delete
    :param recursive: delete directories recursively
    :return: the parsed specification entry
    """
    # create_spec consumes the locals (date, uuid_str, path, ...) via
    # **locals(), so the variable names below must not change.
    uuid_str = str(uuid.uuid1())
    date = DateTime.now()
    specification = self.create_spec(
        f"""
        action: delete
        source:
          path: {path}
          recursive: {recursive}
        status: waiting
        """, **locals())
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def _copy_file(self, sourcefile, destinationfile):
    """
    adds a copy action to the queue

    copies sourcefile on the source service to destinationfile on the
    destination service. The copy will not be performed if the files
    are the same.

    :param sourcefile: path of the file on the source service
    :param destinationfile: path of the file on the destination service
    :return: the parsed specification entry
    """
    timestamp = DateTime.now()
    identifier = str(uuid.uuid1())
    # NOTE(review): "cloud" is filled from self.collection while the
    # sibling queue methods use self.name -- confirm this is intended.
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{self.source}:{sourcefile}"
          kind: storage
          id: {identifier}
          cloud: {self.collection}
          collection: {self.collection}
          created: {timestamp}
        action: copy
        source:
          service: {self.source}
          path: {sourcefile}
        destination:
          service: {self.destination}
          path: {destinationfile}
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number += 1
    return entries
def cancel(self, id=None):
    """
    cancels the job with the given id

    :param id: job identifier; with None every job is canceled
    :return: the parsed specification entry
    """
    entry_id = str(uuid.uuid1())
    now = DateTime.now()
    specification = textwrap.dedent(f"""
        cm:
          number: {self.number}
          name: "{id}"
          kind: storage
          id: {entry_id}
          cloud: {self.name}
          collection: {self.collection}
          created: {now}
        action: cancel
        status: waiting
        """)
    entries = yaml.load(specification, Loader=yaml.SafeLoader)
    self.number = self.number + 1
    return entries
def gather_keys(username=None,
                hosts=None,
                filename="~/.ssh/id_rsa.pub",
                key="~/.ssh/id_rsa",
                processors=3,
                dryrun=False):
    """
    returns in a list the keys of the specified hosts

    :param username: unused, kept for backward compatibility
    :param hosts: parameterized host names (expanded via Parameter.expand)
    :param filename: path of the local public key to include
    :param key: unused, kept for backward compatibility
    :param processors: unused, kept for backward compatibility
    :param dryrun: unused, kept for backward compatibility
    :return: the public keys joined by newlines as a single string
    """
    names = Parameter.expand(hosts)

    job_set = JobSet("ssh_keygen", executor=JobSet.ssh)
    command = "cat .ssh/id_rsa.pub"
    for host in names:
        job_set.add({"name": host, "host": host, "command": command})
    # BUG FIX: parallelism was len(hosts) -- the *unexpanded* parameter,
    # which counts characters when hosts is a string; use the expanded
    # host list instead.
    job_set.run(parallel=len(names))

    results_key = []
    # NOTE: loop variable renamed from "key", which shadowed the
    # function parameter of the same name.
    for result in job_set.array():
        stdout = result['stdout'].decode('UTF-8')
        if "Command could not run" not in stdout:
            results_key.append(stdout)

    filename = path_expand(filename)
    localkey = {
        'host': "localhost",
        'command': [''],
        'execute': "",
        'stdout': readfile(filename).strip(),
        'stderr': None,
        'returncode': True,
        'success': True,
        'date': DateTime.now()
    }

    # getting the output (results_key is always a list here, so the
    # former "is None" check was dead code and has been removed)
    output = '\n'.join([localkey['stdout']] + results_key) + "\n"
    return output
def dict_table(cls, d, order=None, header=None, sort_keys=True,
               show_none="", humanize=None, max_width=48):
    """
    prints a pretty table from an dict of dicts

    :param d: A dict with dicts of the same type. Each key will be a
              column
    :param order: The order in which the columns are printed. The
                  order is specified by the key names of the dict.
    :param header: The Header of each of the columns
    :type header: A list of string
    :param sort_keys: Key(s) of the dict to be used for sorting. This
                      specify the column(s) in the table for sorting.
    :type sort_keys: string or a tuple of string (for sorting with
                     multiple columns)
    :param show_none: prints None if True for None values
    :type show_none: string
    :param humanize: keys whose values are timestamps to humanize
    :param max_width: maximum width for a cell
    :type max_width: int
    """
    start = DateTime.now()

    def _keys():
        # union of all keys across all row dicts
        all_keys = []
        for e in d:
            all_keys.extend(d[e].keys())
        return list(set(all_keys))

    # noinspection PyBroadException
    def _get(item, key):
        # fetch d[item][key] as a display string; blanks on failure
        try:
            tmp = str(d[item][key])
            if tmp == "None":
                tmp = show_none
            elif humanize and key in humanize:
                tmp = parser.parse(tmp)
                tmp = DateTime.humanize(start - tmp)
        # BUG FIX: was a bare "except:", which also swallowed
        # SystemExit and KeyboardInterrupt.
        except Exception:
            tmp = ' '
        return tmp

    if d is None or d == {}:
        return None

    if order is None:
        order = _keys()

    if header is None and order is not None:
        header = order
    elif header is None:
        header = _keys()

    x = PrettyTable(header)
    x.max_width = max_width

    if sort_keys:
        # isinstance replaces the former type(...) == ... checks
        if isinstance(sort_keys, str):
            sorted_list = sorted(d, key=lambda k: d[k][sort_keys])
        elif isinstance(sort_keys, tuple):
            sorted_list = sorted(
                d,
                key=lambda k: tuple(d[k][s] for s in sort_keys))
        else:
            # e.g. sort_keys=True (the default): keep dict order
            sorted_list = d
    else:
        sorted_list = d

    for element in sorted_list:
        x.add_row([_get(element, key) for key in order])
    x.align = "l"
    return x
def update_dict(self, elements, kind=None):
    """
    This function adds a cloudmesh cm dict to each dict in the list
    elements. Libcloud returns an object or list of objects. With the
    dict method this object is converted to a dict. Typically this
    method is used internally.

    :param elements: the list of original dicts. If elements is a
                     single dict a list with a single element is
                     returned.
    :param kind: for some kinds special attributes are added. This
                 includes key, vm, image, flavor.
    :return: The list with the modified dicts
    """
    if elements is None:
        return None
    elif isinstance(elements, list):
        _elements = elements
    else:
        _elements = [elements]

    d = []
    for entry in _elements:
        if "cm" not in entry:
            entry['cm'] = {}
        if kind == 'ip':
            entry['name'] = entry['floating_ip_address']
        entry["cm"].update({
            "kind": kind,
            "driver": self.cloudtype,
            "cloud": self.cloud,
            "name": entry['name']
        })
        if kind == 'key':
            # the comment is everything after the key type and body
            try:
                entry['comment'] = entry['public_key'].split(" ", 2)[2]
            except Exception:
                entry['comment'] = ""
            entry['format'] = \
                entry['public_key'].split(" ", 1)[0].replace("ssh-", "")
        elif kind == 'vm':
            entry["cm"]["updated"] = str(DateTime.now())
            if "created_at" in entry:
                entry["cm"]["created"] = str(entry["created_at"])
            if 'status' in entry:
                entry["cm"]["status"] = str(entry["status"])
            else:
                entry["cm"]["created"] = entry["modified"]
        elif kind == 'flavor':
            # NOTE(review): this sets the top-level "updated" attribute,
            # not entry["cm"]["updated"] -- confirm this is intended.
            entry["cm"]["created"] = entry["updated"] = str(DateTime.now())
        elif kind == 'image':
            entry["cm"]["created"] = entry["updated"] = str(DateTime.now())
        d.append(entry)
    return d
def update_dict(self, elements, kind=None):
    """
    This function adds a cloudmesh cm dict to each dict in the list
    elements. Libcloud returns an object or list of objects. With the
    dict method this object is converted to a dict. Typically this
    method is used internally.

    :param elements: the list of original dicts. If elements is a
                     single dict a list with a single element is
                     returned.
    :param kind: for some kinds special attributes are added. This
                 includes key, vm, image, flavor.
    :return: The list with the modified dicts
    """
    if elements is None:
        return None
    elif type(elements) == list:
        _elements = elements
    else:
        _elements = [elements]
    d = []
    for entry in _elements:
        if "cm" not in entry:
            entry['cm'] = {}
        if kind == 'ip':
            # OCI attribute naming: the address lives in _ip_address
            entry['name'] = entry['_ip_address']
        entry["cm"].update({
            "kind": kind,
            "driver": self.cloudtype,
            "cloud": self.cloud,
            "updated": str(DateTime.now())
        })
        if kind == 'key':
            # the comment is everything after the key type and body
            try:
                entry['comment'] = entry['public_key'].split(" ", 2)[2]
            except:
                entry['comment'] = ""
            entry['format'] = \
                entry['public_key'].split(" ", 1)[0].replace("ssh-", "")
        elif kind == 'vm':
            entry['name'] = entry["cm"]["name"] = entry["_display_name"]
            # resolve the image id to its human-readable display name
            entry['_image'] = self.compute.get_image(
                entry['_image_id']).data.display_name
            # look up the private IP object and, from it, any attached
            # public IP via the OCI virtual network API
            private = self.get_private_ipobj(entry['_id'])
            if private:
                details = oci.core.models.GetPublicIpByPrivateIpIdDetails(
                    private_ip_id=private.id)
                public = self.virtual_network.get_public_ip_by_private_ip_id(
                    details).data
                if public:
                    entry['ip_public'] = public.ip_address
                # NOTE(review): nesting reconstructed from mangled
                # source -- ip_private assumed set whenever a private
                # IP exists; confirm against the original file.
                entry['ip_private'] = private.ip_address
            entry["cm"]["created"] = str(entry["_time_created"])
            entry["status"] = entry["cm"]["status"] = str(
                entry["_lifecycle_state"])
            # flatten the nested OCI model objects into plain dicts
            entry['_launch_options'] = entry['_launch_options'].__dict__
            entry['_source_details'] = entry['_source_details'].__dict__
            entry['_agent_config'] = entry['_agent_config'].__dict__
        elif kind == 'flavor':
            entry['name'] = entry["cm"]["name"] = entry["_shape"]
            entry["cm"]["created"] = str(DateTime.now())
        elif kind == 'image':
            entry['name'] = entry["cm"]["name"] = entry["_display_name"]
            entry["cm"]["created"] = str(DateTime.now())
            entry['_launch_options'] = entry['_launch_options'].__dict__
        elif kind == 'secgroup':
            entry['name'] = entry["cm"]["name"] = entry["_display_name"]
        # expose the OCI id under "oracle_id" and drop the private key
        key_id = "_id"
        if key_id in entry.keys():
            entry["oracle_id"] = entry[key_id]
            entry.pop(key_id)
        d.append(entry)
    return d