def fix_keys_file(filename):
    """Append ~/.ssh/id_rsa.pub to the given authorized-keys file,
    removing duplicate and empty lines.

    :param filename: path to the authorized_keys file to fix
    """
    # concatenate ~/.ssh/id_rsa.pub
    lines = readfile(filename)
    key = readfile(path_expand("~/.ssh/id_rsa.pub"))
    authorized_keys = lines + key
    # remove duplicates and empty lines
    # BUG FIX: splitlines() strips the newline terminators, so empty lines
    # show up as '' (the original test `x != '\n'` never matched anything).
    keys_list = [x for x in set(authorized_keys.splitlines()) if x.strip()]
    keys = '\n'.join(keys_list) + '\n'
    writefile(filename, keys)
def create_metadata(metadata, location):
    """Render the epub metadata template with the values from *metadata*
    and write it to *location*, unless that file already exists.

    Newlines in the author and title fields are flattened to spaces so
    they fit on a single template line.
    """
    target = path_expand(location)
    Path(os.path.dirname(target)).mkdir(parents=True, exist_ok=True)
    if os.path.isfile(target):
        return
    template = pkg_resources.resource_filename(
        "bookmanager", 'template/epub/metadata.txt')
    values = copy.deepcopy(metadata)
    for key in ("author", "title"):
        values[key] = values[key].replace("\n", " ")
    writefile(target, readfile(template).format(**values))
def cat_bibfiles(directory, output):
    """Concatenate all .bib files found recursively under *directory*
    into *output*, prefixing each file's content with a comment naming
    its source.

    :param directory: directory to search (``~`` is expanded)
    :param output: path of the combined bib file to write
    :return: list of the bib file paths found
    """
    d = path_expand(directory)
    bibs = list(Path(d).glob("**/*.bib"))
    pprint(bibs)
    parts = []
    for bib in bibs:
        bib = str(bib)
        content = readfile(bib)
        # '%' starts a comment in BibTeX, so each chunk is labeled
        parts.append("\n\n% " + bib + "\n\n" + content)
    # join once instead of the original quadratic string concatenation
    writefile(output, "".join(parts))
    return list(bibs)
def set(ssid, password, dryrun=False):
    """Write the wifi configuration for the given SSID and password.

    :param ssid: the network SSID
    :param password: the network password
    :param dryrun: if True, print the configuration with the password
                   masked instead of writing the file
    """
    if ssid is None or password is None:
        Console.error("SSID or password not set")
        # BUG FIX: previously fell through and formatted None values
        return
    if dryrun:
        password = "******"  # masked; picked up via locals() below
    # the template uses BEGIN/END as placeholders for literal braces
    config = Wifi.template.format(**locals()) \
        .replace("BEGIN", "{").replace("END", "}")
    if dryrun:
        print(Wifi.location)
        print(config)
    else:
        try:
            writefile(Wifi.location, config)
        except FileNotFoundError:
            Console.error(f"The file does not exist: {Wifi.location}")
def convert(label, tags, categories, projects, view):
    """Fill in the empty tags/categories/projects placeholders of a
    publication index.md and optionally print the result.

    :param label: publication directory name under content/publication
    :param tags: value for the tags field (skipped if falsy)
    :param categories: value for the categories field (skipped if falsy)
    :param projects: value for the projects field (skipped if falsy)
    :param view: if truthy, print the resulting file content
    """
    filename = f"content/publication/{label}/index.md"
    content = readfile(filename)
    # dict preserves insertion order, so replacements happen in the
    # same sequence as the original if-chain
    fields = {
        "tags": tags,
        "categories": categories,
        "projects": projects,
    }
    for key, value in fields.items():
        if value:
            content = content.replace(f"{key}: []", f'{key}: ["{value}"]')
    writefile(filename, content)
    if view:
        print(content)
def yaml_to_json(name, filename="~/.cloudmesh/google.json"):
    """
    Given the name of a storage entry in the cloudmesh yaml file, write
    its credentials to a json file that can be conveniently used by
    google.

    :param name: name of the storage provider entry in cloudmesh.yaml
    :param filename: destination json file
    :return: None
    """
    config = Config()
    # only the credentials sub-tree is serialized
    # (removed an unused lookup of the whole provider entry)
    credentials = config[f"cloudmesh.storage.{name}.credentials"]
    # generate json
    writefile(filename, json.dumps(credentials, indent=2) + "\n")
def ssh_config_add(self, label, host, user):
    """Append a Host entry for *label* to ~/.ssh/config unless an entry
    with that label is already present.

    :param label: the Host alias to add
    :param host: the real hostname for the entry
    :param user: the login user for the entry
    """
    config = readfile("~/.ssh/config")
    if f"Host {label}" in config:
        Console.warning(f"{label} is already in ~/.ssh/config")
        return
    entry = textwrap.dedent(f"""
    Host {label}
         Hostname {host}
         User {user}
         IdentityFile ~/.ssh/id_rsa
    """)
    Console.info(f"adding {label} to ~/.ssh/config\n"
                 + textwrap.indent(entry, prefix="    "))
    writefile("~/.ssh/config", config + entry)
def add_script(filename, script):
    """
    Adds all the lines of the script to the filename, if the line of
    the script does not already exist. It is useful to add lines to
    for example the .bashrc script.

    :param filename: the file to update
    :param script: the script whose lines are added (str or iterable of lines)
    :return: None
    """
    # BUG FIX: the original overwrote the `script` parameter with the file
    # content and then iterated over the file's characters instead of the
    # script's lines.
    content = readfile(filename)
    lines = script.splitlines() if isinstance(script, str) else script
    for line in lines:
        content = Installer.add_line(content, line)
    writefile(filename, content)
def get_vendor(self):
    """
    Retrieves the names of vendors from linux-usb.org, caching the
    result under ~/.cloudmesh/cmburn/usb.ids.

    :return: the content of the file
    :rtype: str
    """
    filename = 'usb.ids'
    # NOTE(review): restored the redacted paths from the otherwise-unused
    # `filename` variable; linux-usb.org serves the vendor list at /usb.ids
    full_path = path_expand(f"~/.cloudmesh/cmburn/{filename}")
    if not os.path.isfile(full_path):
        r = requests.get(f'http://www.linux-usb.org/{filename}')
        content = r.text
        writefile(full_path, content)
    else:
        content = readfile(full_path)
    return content
def sudo_writefile(filename, content, append=False):
    """Write *content* to *filename* with sudo privileges.

    The content is staged in ~/.cloudmesh/tmp and copied into place
    with ``sudo cp``.

    :param filename: destination file (may require root to write)
    :param content: the text to write
    :param append: if True, append to the existing file content
    :return: the output of the copy command
    """
    os.system('mkdir -p ~/.cloudmesh/tmp')
    tmp = "~/.cloudmesh/tmp/tmp.txt"
    if append:
        # prepend the current file content read via sudo
        content = sudo_readfile(filename, split=False) + content
    writefile(tmp, content)
    # NOTE(review): destination restored as {filename} (was redacted)
    result = subprocess.getstatusoutput(f"sudo cp {tmp} {filename}")
    # If exit code is not 0
    if result[0] != 0:
        Console.warning(f"{filename} was not created correctly -> {result[1]}")
    return result[1]
def setup(self):
    """Create dummy test files in the configured local storage directory
    for the s3 and azure transfer tests."""
    config = Config(config_path="~/.cloudmesh/cloudmesh.yaml")
    spec = config["cloudmesh.storage"]
    local_target = spec["local"]["default"]["directory"]
    self.file_name = "test_transfer_local_s3.txt"
    self.azure_file_name = "test_transfer_local_azure.txt"
    # Create a dummy file in local storage at local_target
    self.location = f"{local_target}/{self.file_name}"
    self.location_azure = f"{local_target}/{self.azure_file_name}"
    for target in (self.location, self.location_azure):
        writefile(path_expand(target), content="Test file.")
def create_metadata(metadata, location, kind="epub"):
    """Render the metadata template for the given output *kind*
    (e.g. epub, latex) with the values from *metadata* and write it
    to *location*.

    A missing date field defaults to the current timestamp; newlines
    in author and title are flattened to spaces.
    """
    target = path_expand(location)
    Path(os.path.dirname(target)).mkdir(parents=True, exist_ok=True)
    template = pkg_resources.resource_filename(
        "bookmanager", f'template/{kind}/metadata.txt')
    values = copy.deepcopy(metadata)
    if "date" not in values:
        values["date"] = str(datetime.datetime.now())
    for key in ("author", "title"):
        values[key] = values[key].replace("\n", " ")
    writefile(target, readfile(template).format(**values))
    os.system("sync")
def set(ssid=None, password=None, country="US",
        psk=True, location=location, sudo=False):
    """
    Sets the wifi. Only works for psk based wifi.

    :param ssid: The ssid
    :type ssid: str
    :param password: The password
    :type password: str
    :param country: Two digit country code
    :type country: str
    :param psk: If true uses psk authentication
    :type psk: bool
    :param location: The file where the configuration file should be written to
    :type location: str
    :param sudo: If true the write will be done with sudo
    :type sudo: bool
    :return: True if success
    :rtype: bool
    """
    if ssid is None or (psk and password is None):
        Console.error("SSID or password not set")
        return False
    # pick the template matching the auth scheme; both are filled from
    # the local variable namespace
    template = Wifi.template if psk else Wifi.template_key
    config = template.format(**locals())
    writer = Sudo.writefile if sudo else writefile
    try:
        writer(location, config)
    except FileNotFoundError:
        Console.error(f"The file does not exist: {location}")
        return False
    return True
def add_link_to_file(url, filename, variables):
    """Insert a cloud-icon link to *url* into the headline (first line)
    of the markdown file *filename*.

    :param url: the github url the icon links to
    :param filename: the markdown file to update
    :param variables: dict; when it contains 'file.base' and
                      'file.github', local paths in the headline are
                      rewritten to the github url
    """
    # NOTE(review): read path restored as the target file (was redacted)
    lines = readfile(Path(filename))
    lines = lines.splitlines()
    # headlines may carry pandoc attributes in {...}; keep them after the link
    if "{" in lines[0]:
        headline, ref = lines[0].split("{", 1)
        lines[0] = headline + f" [:cloud:]({url}) " + "{" + ref
    else:
        lines[0] = lines[0] + f" [:cloud:]({url})"
    # point at the rendered github page, not the raw file content
    lines[0] = lines[0].replace("raw.githubusercontent.com", "github.com")
    # lines[0] = lines[0].replace("/master/", "/master/master/")
    lines[0] = lines[0].replace("/master/", "/blob/master/")
    if 'file.base' in variables and "file.github" in variables:
        path = str(Path(variables["file.base"]).resolve())
        lines[0] = lines[0].replace(path, variables["file.github"])
    writefile(filename, '\n'.join(lines))
def __init__(self, infile, outfile, indent=2):
    """Convert *infile* to *outfile*.

    Supported conversions: yaml -> json, and conversion to .py via the
    external ``evegenie`` tool.

    :param infile: the source file
    :param outfile: the destination file
    :param indent: json indentation width
    """
    if ".py" in outfile:
        print("... converting", infile, "->", outfile)
        try:
            r = Shell.execute("evegenie", [infile])
            print(r)
        except Exception as e:
            print("E", str(e))
            # BUG FIX: `"ERROR:" in e` raised TypeError on an exception
            # object; the substring test needs the message string
            if "ERROR:" in str(e):
                print("Error: cloudmesh.evegenie is not installed")
    elif ".yml" in infile and ".json" in outfile:
        # close the input file deterministically
        with open(infile) as f:
            element = yaml.safe_load(f)
        print("... writing to", outfile)
        writefile(outfile, json.dumps(element, indent=indent))
    else:
        print("conversion not yet supported")
def fetch(self, url=None, destination=None):
    """
    Fetches the cloudmesh yaml file and places it at the given
    destination.

    :param url: url of the cloudmesh.yaml file; defaults to the master
                copy on github
    :param destination: the destination file; defaults to
                        ~/.cloudmesh/cloudmesh.yaml
    :return: None
    """
    if url is None:
        url = ("https://raw.githubusercontent.com/cloudmesh/"
               "cloudmesh-configuration/master/cloudmesh/configuration/"
               "etc/cloudmesh.yaml")
    if destination is None:
        destination = "~/.cloudmesh/cloudmesh.yaml"
    destination = path_expand(destination)
    Shell.mkdir("~/.cloudmesh")
    response = requests.get(url)
    writefile(destination, response.text)
def create_file(self, location, content):
    """Create *location* with *content*, first creating its parent
    directory.

    :param location: the file path (``~`` is expanded)
    :param content: the text to write
    """
    # BUG FIX: os.dirname does not exist (AttributeError at runtime);
    # the correct function is os.path.dirname
    Shell.mkdir(os.path.dirname(path_expand(location)))
    writefile(location, content)
def main_generate(class_name, directory, port=8000):
    """Generate a REST service (OpenAPI spec, handlers, file operations
    and a server stub) for one class of sklearn.linear_model.

    :param class_name: name of the sklearn.linear_model class to wrap
    :param directory: output directory root (``~`` is expanded later)
    :param port: port the generated server will listen on
    """
    #
    # set up dir structure
    #
    Shell.mkdir(f"{directory}/{class_name}")
    Shell.mkdir(f"{directory}/{class_name}/cloudmesh")
    Shell.mkdir(f"{directory}/{class_name}/data")
    # empty __init__.py makes the generated cloudmesh dir a package
    writefile(f"{directory}/{class_name}/cloudmesh/__init__.py", "")
    #
    # Type table: maps docstring type names to OpenAPI schema types
    type_table = {
        'matrix': 'array',
        'array': 'array',
        'array-like': 'array',
        'numpy array': 'array',
        'bool': 'boolean',
        'int': 'integer',
        'float': 'number'
    }
    # The module to read
    module = sklearn.linear_model
    # The classes to read from the module
    classes = [class_name]
    # If type table is specified, it will read all classes in the module
    sigs = SignatureScraper().get_signatures(
        module=module,
        classes=classes,
        type_table=type_table)
    template_folder = os.path.join((os.path.dirname(__file__)), 'templates')
    directory = path_expand(directory)
    # print(template_folder)
    # print(directory)
    generator = CodeGenerator(
        func_signatures=sigs,
        cwd=directory,  # BUG: THIS IS WRONG
        function_operation_id_root='.',
        file_operation_id_root='cloudmesh.analytics.build.file',
        server_url=f'http://localhost:{port}/cloudmesh/{class_name}',
        template_folder=template_folder,
        output_folder=directory,
        port=port,
        service=class_name
    )
    # emit the OpenAPI spec, the request handlers and the file-op module
    generator.generate_api_specification(
        output_name=f'{class_name}/{class_name}.yaml',
        template_name='component.j2')
    generator.generate_handlers(
        output_name=f'{class_name}/cloudmesh/{class_name}.py',
        template_name='handlers.j2')
    generator.generate_file_operations(
        output_name=f'{class_name}/cloudmesh/file.py',
        template_name='file.py')
    #
    # Generate the server code while using a build in cloudmesh specific server
    #
    server = OpenAPIServer(
        host="127.0.0.1",
        path=".",
        spec=f"{class_name}.yaml",
        key="dev")
    server.write(f'{directory}/{class_name}/{class_name}_server.py')
    # NOTE(review): output_folder is redirected after generation; presumably
    # for a later command-generation step — confirm intent
    generator.output_folder = os.path.join((os.path.dirname(__file__)), 'command')
    print(directory)
def create_file(self, location, content):
    """Ensure the parent directory exists, then write *content* to the
    expanded *location*."""
    self.create_dir(location)
    target = path_expand(location)
    writefile(target, content)
def create_file(self, location, content):
    """Create *location* with *content*, ensuring its parent directory
    exists.

    :param location: the file path to create
    :param content: the text to write
    """
    print(f"create: {location}")
    import os  # local import keeps the fix self-contained
    # BUG FIX: previously the file path itself was created as a
    # directory (Shell.mkdir(location)), which made writefile to the
    # same path fail; siblings of this method mkdir the parent instead.
    directory = os.path.dirname(location)
    if directory:
        Shell.mkdir(directory)
    writefile(location, content)
"[{name}]({x})".format(x=x, name=os.path.basename(x).replace('.py', '')) for x in tests ] tests = " * " + "\n * ".join(links) # # get manual # if repo == "cloudmesh-installer": manual = Shell.run("cloudmesh-installer --help") else: manual = Shell.run(f"cms help {command}") man = [] start = False for line in manual.splitlines(): start = start or "Usage:" in line if start: if not line.startswith("Timer:"): man.append(line) manual = textwrap.dedent('\n'.join(man)).strip() manual = "```bash\n" + manual + "\n```\n" # # create readme # source = readfile("README-source.md") readme = source.format(**locals()) writefile("README.md", readme)
def _print(self, name, data, kind, directory=None):
    """Render the manual content for *data* and either print it to
    stdout (no directory given) or write it to
    ``<directory>/<name>.<kind>``."""
    content = self._man_content(data, kind)
    if directory is not None:
        writefile(f"{directory}/{name}.{kind}", content)
    else:
        print(content)
def generate(self, output):
    """Generate the book in the requested output format.

    Collects the destination files of all discovered sections, writes
    metadata/css/bibliography support files into ./dest, assembles the
    pandoc command line for *output* and executes it.

    :param output: one of epub, pdf, html, docx, md/markdown, tex
    :raises ValueError: if the output format is not supported
    """
    banner(f"Creating {output}", c="&")

    # collect the destination files of all sections/headers in order
    files = []
    for entry in self.docs.entries:
        if entry.kind in ["section", "header"]:
            url = entry.url
            path = entry.path
            basename = entry.basename
            local = entry.destination
            files.append(local)

    banner("Finding Contents")
    print("Number of included Sections:", len(self.docs.entries))

    banner("Creating Command")
    files = " ".join(files)

    # metadata["stylesheet"] = path_expand(metadata["stylesheet"])
    title = self.metadata["title"]

    # every directory containing a section becomes part of the
    # pandoc resource path
    dirs = []
    for section in self.docs.entries:
        if section["kind"] == "section":
            # pprint(section)
            path = section["path"]
            dirs.append(path_expand(f"./dest/book/{path}"))
    dirs = set(dirs)
    # dirs = find_image_dirs(directory='./dest')

    if output in ["pdf"]:
        create_metadata(self.metadata, "./dest/book/metadata.txt", kind="latex")
    else:
        create_metadata(self.metadata, "./dest/book/metadata.txt", kind="epub")

    from cloudmesh.common.Shell import Shell
    r = Shell.cat("./dest/book/metadata.txt")
    if self.verbose:
        banner(r)

    create_css(self.metadata, "./dest/book/epub.css")

    directories = (":".join(dirs))
    metadata = path_expand("./dest/book/metadata.txt")
    filename = self.metadata["filename"]

    # best effort: copy bibliographies next to the build tree
    for file in ["report.bib", "references.bib"]:
        try:
            copyfile(file, f"dest/{file}")
        except Exception:
            pass

    cat_bibfiles("./dest", "./dest/all.bib")
    bib = path_expand("./dest/all.bib")
    csl = path_expand("./dest/book/ieee-with-url.csl")
    bibfile = f" --metadata link-citations=true --bibliography={bib} --csl={csl}"
    all_bibs = readfile("./dest/all.bib")
    css_style = pkg_resources.resource_filename(
        "bookmanager", 'template/epub/ieee-with-url.csl')
    copyfile(css_style, path_expand("./dest/book/ieee-with-url.csl"))
    # disable citation processing when there are no bib entries
    if "@" not in all_bibs:
        bibfile = ""

    for f in [
        'template/latex/listings-setup.tex',
        'template/latex/eisvogel.latex',
        'template/empty.md'
    ]:
        source = pkg_resources.resource_filename("bookmanager", f)
        _filename = os.path.basename(source)
        copyfile(source, f"dest/{_filename}")

    options = "--toc --toc-depth=6 --number-sections -F pandoc-crossref --citeproc --from markdown-smart"
    resources = f"--resource-path={directories}"
    markdown = "--verbose --filter pandoc-crossref --citeproc -f markdown+emoji+smart --indented-code-classes=bash,python,yaml"
    pdf_options = "--verbose -f markdown+emoji+smart --listings --indented-code-classes=bash,python,yaml" \
                  " --include-in-header ./dest/listings-setup.tex --template ./dest/eisvogel "
    # GGGG
    markdown = "--verbose -f markdown+emoji --indented-code-classes=bash,python,yaml"
    # fonts = '-V mainfonts="DejaVu Sans"'
    pdffonts = ''
    # font embedding is disabled; the unused DejaVu/OpenSans font lists
    # that used to live here were dead code and have been removed
    epubfonts = ''

    if output in ["epub"]:
        # NOTE(review): output path restored as {filename} (was redacted);
        # `filename` comes from self.metadata above and is otherwise unused
        epub = path_expand(f"./dest/{filename}")
        # noinspection PyPep8
        command = f'cd dest/book; pandoc {options} {markdown} ' \
                  f' {epubfonts} {resources} {bibfile} ' \
                  f' -o {epub} {files}' \
                  f' {metadata}'
        if self.verbose:
            self.print_command(command)
    elif output == "pdf":
        create_metadata(self.metadata, "./dest/book/metadata.txt", kind="latex")
        pdf = path_expand(f"./dest/{filename}").replace(".epub", ".pdf")
        tex = path_expand(f"./dest/{filename}").replace(".epub", ".tex")
        md = path_expand(f"./dest/{filename}").replace(".epub", ".md")
        metadata = "./dest/book/metadata.txt"
        # path = Path("../../bookmanager/bookmanager/template/latex/eisvogel").resolve()
        book = "-V titlepage=true"
        # latex = f"--template {path} --pdf-engine=xelatex"
        latex = f"--pdf-engine=pdflatex --indented-code-classes=bash,python,yaml"
        # first pass: flatten all sections into one markdown file
        command = f'pandoc' \
                  f' {files} ' \
                  f' --to=markdown > {md}'
        self.print_command(command)
        os.system(command)
        # latex-proof a few characters and emoji shortcodes
        content = readfile(md)
        content = content \
            .replace("µ", "micro") \
            .replace(":cloud:", "\\faGithub") \
            .replace(":o2:", "\\faBug") \
            .replace("\\lstinline!\\faBug!", "\\faBug")
        writefile(md, content)
        command = f'pandoc -s {options} {pdf_options} {pdffonts}' \
                  f' {bibfile} {latex} {book} {resources} ' \
                  f' {md} ' \
                  f' {metadata} --from=markdown -o {pdf}'
    elif output == "html":
        metadata = "./dest/metadata.txt"
        options = "--toc --number-sections"
        command = f'pandoc {options} -o ./dest/book.html {files}'
    elif output == "docx":
        metadata = "./dest/metadata.txt"
        options = "--toc --number-sections"
        command = f'pandoc {options} -o ./dest/book.docx {files}'
    elif output in ["md", "markdown"]:
        metadata = "./dest/metadata.txt"
        options = "--toc --number-sections -f markdown+smart"
        command = f'pandoc {options} -o ./dest/book.md {files}'
    elif output in ["tex"]:
        metadata = "./dest/metadata.txt"
        options = "--toc --number-sections"
        command = f'pandoc {options} -o ./dest/book.tex {files}'
    else:
        raise ValueError(
            f"this output format is not yet supported: {output}")

    if self.verbose:
        banner("COMMAND")
        self.print_command(command)
    os.system(command)
    try:
        os.system("sync")
    except Exception:
        pass
def create_section(filename, header, n):
    """Write a markdown section file consisting of a level-*n* heading
    followed by a blank line."""
    heading = "#" * n + f" {header}\n\n"
    writefile(filename, heading)
def replace_in_file(filename, old_text, new_text):
    """Replace every occurrence of *old_text* with *new_text* in the
    given file, rewriting it in place."""
    updated = readfile(filename).replace(old_text, new_text)
    writefile(filename, updated)
def create_file(self, location, content):
    """Ensure the directory for *location* exists, then write *content*
    into the file."""
    target = location
    self.create_dir(target)
    writefile(target, content)
def create_version_cache(refresh=False):
    """
    Creates a cache of all released pi images.

    :param refresh: refresh it from the Web if True
    :type refresh: bool
    :return: the flat list of version entries; the cache is written to
             ~/.cloudmesh/cmburn/distributions.yaml
    :rtype: list
    """
    data = {
        "lite": [],
        "full": [],
        "lite-64": [],
        "full-64": [],
        "lite-legacy": [],
        "full-legacy": []
    }
    cache = Path(
        os.path.expanduser("~/.cloudmesh/cmburn/distributions.yaml"))

    def fetch_kind(kind=None):
        # Collect all version entries for one image kind and record the
        # newest one again under the tag "latest-<kind>".
        print(f"finding {kind} repos ...")
        image = Image()
        location = f"{image.raspberry_images[kind]}"
        repos = [location]
        latest = {'date': "1900-01-01"}
        for repo in repos:
            versions, downloads = Image.versions(repo)
            for version, download in zip(versions, downloads):
                entry = {
                    "version": version,
                    "tag": version.replace("raspios_", "").replace("_armhf", ""),
                    "url": download,
                    "date": version.split("-", 1)[1],
                    "type": kind,
                    "os": "raspberryos",
                }
                data[kind].append(entry)
                if entry["date"] >= latest['date']:
                    latest = dict(entry)
        latest["tag"] = f"latest-{kind}"
        data[kind].append(latest)

    if refresh or not cache.exists():
        os.system("mkdir -p ~/.cloudmesh/cmburn")
        for kind in data:
            fetch_kind(kind=kind)
        writefile(cache, yaml.dump(data))

    # BUG FIX: the cache file was read twice (a plain readfile whose
    # result was immediately discarded); read and parse it once
    data = yaml.safe_load(readfile(cache))

    # convert to array
    result = data["lite"] + data["full"] + data["lite-64"] + data["full-64"] \
        + data["lite-legacy"] + data["full-legacy"] + Ubuntu.distribution
    return result
def do_man(self, arg, arguments):
    """
    ::

      Usage:
            man readme [-p] --toc [--file=FILE]
            man readme [-p] [--tag=TAG] [--file=FILE] --include=INCLUDE
            man readme [-p] [--tag=TAG] [--file=FILE] --command=COMMAND
            man [--dir=DIR] [--format=FORMAT] [--noheader]
            man COMMANDS... [--dir=DIR] [--format=FORMAT]

      Options:
            --toc   adds a table of content between the TOC tag
            -p      replacement in the file instead of stdout
            --noheader  no rst header
            --tag=TAG   the tag used to embed the manual page [default: MANUAL]
            --file=FILE the file for the replacement between the tags [default: README.md]

      Arguments:
            COMMANDS   the command manual pages to be printed

      Description:
            man
                 Prints out the help pages
            man COMMAND
                 Prints out the help page for a specific command
    """
    # documented and undocumented commands, and the help_* registry
    cmds_doc = []
    cmds_undoc = []
    help_page = {}

    def get_manual_pages():
        # Partition all do_* commands into documented/undocumented,
        # mirroring cmd.Cmd's help discovery: a command counts as
        # documented if it has a help_* method or a docstring.
        names = self.get_names()
        for name in names:
            if name[:5] == 'help_':
                help_page[name[5:]] = 1
        names.sort()
        # There can be duplicates if routines overridden
        prevname = ''
        for name in names:
            if name[:3] == 'do_':
                if name == prevname:
                    continue
                prevname = name
                cmd = name[3:]
                if cmd in help_page:
                    cmds_doc.append(cmd)
                    del help_page[cmd]
                elif getattr(self, name).__doc__:
                    cmds_doc.append(cmd)
                else:
                    cmds_undoc.append(cmd)

    # normalize the docopt arguments with defaults
    arguments.kind = arguments["--format"] or "md"
    arguments.directory = arguments["--dir"]
    arguments.file = arguments["--file"] or "README.md"
    arguments.tag = arguments["--tag"] or "MANUAL"

    get_manual_pages()

    if arguments["--dir"]:
        d = arguments["--dir"]
        Shell.mkdir(d)

    if arguments["readme"] and arguments["--toc"]:
        # generate (or insert in place with -p) a table of contents
        in_place = arguments["-p"]
        if in_place:
            man = Shell.run(f"md_toc -p github {arguments.file}")
            print(man)
        else:
            man = Shell.run(f"md_toc github {arguments.file}")
            print(man)
    elif arguments["readme"] and arguments["--include"]:
        # embed an included file between the tags of the README
        in_place = arguments["-p"]
        manpage, old = self._convert_file(file=arguments.file,
                                          include=arguments["--include"],
                                          tag=arguments.tag)
        # only rewrite the file when the content actually changed
        if in_place and manpage != old:
            writefile(arguments.file, manpage)
        else:
            print(manpage)
    elif arguments["readme"] and arguments["--command"]:
        # embed a command's manual page between the tags of the README
        in_place = arguments["-p"]
        manpage, old = self._convert_file(file=arguments.file,
                                          command=arguments["--command"],
                                          tag=arguments.tag)
        if in_place and manpage != old:
            writefile(arguments.file, manpage)
        else:
            print(manpage)
    elif arguments["readme"]:
        in_place = arguments["-p"]
        manpage, old = self._convert_file(arguments.file,
                                          arguments.COMMAND,
                                          arguments.tag)
        if in_place and manpage != old:
            writefile(arguments.file, manpage)
        else:
            print(manpage)
    elif len(arguments.COMMANDS) == 0:
        # no commands given: emit the manual for every documented command
        for entry in cmds_doc:
            data = self._get_help(entry)
            self._print(entry, data, arguments.kind, arguments.directory)
    else:
        commands = arguments.COMMANDS
        for entry in commands:
            if entry in cmds_doc:
                data = self._get_help(entry)
                self._print(entry, data, arguments.kind, arguments.directory)
            else:
                Console.error(f"Cloud not find man page for {entry}")
    return ""
def _configure_worker_interfaces(cls, worker, user='pi'):
    """
    Configures the network interface of the worker to use the master
    as an internet gateway.

    :param worker: A single string hostname for the worker
                   (ie. --hostname option from cm-pi-burn)
    :param user: The user we will use to ssh/scp into the worker
    """
    # NOTE(review): the default user was redacted in the source; 'pi' is
    # the conventional raspberry default — confirm against callers
    if cls.dryrun:
        Console.info("Configuring worker info.")
        Console.info(f"scp /etc/network/interfaces from {cls.master} to {user}@{worker}")
        # BUG FIX: the f prefix was missing, so the placeholders were
        # printed literally
        Console.info(f"Configure default gateway and DNS for {cls.priv_interface} on {user}@{worker}")
    else:
        # Get gateway and netmask for worker
        conf = cls._system('ifconfig')
        conf = conf.split('\n')
        # Search for priv_interface
        info = None
        for i in range(len(conf)):
            if cls.priv_interface in conf[i]:
                info = conf[i + 1].split()

        if info is None:
            Console.error(f"Interface {cls.priv_interface} not found")
            sys.exit(1)
        elif info[0] != 'inet':
            Console.error(f"Interface {cls.priv_interface} found, but there appears to be no iPv4 connection")
            sys.exit(1)
        else:
            # info ex: ['inet', '192.168.1.34', 'netmask', '255.255.255.0', 'broadcast', '192.168.1.255']
            gateway = info[1]
            netmask = info[3]

        # Use scp to get /etc/network/interfaces from worker
        cls._system('mkdir -p ~/.cloudmesh/tmp')
        tmp = f'~/.cloudmesh/tmp/{worker}-interfaces.tmp'
        ignore_setting = '-o "StrictHostKeyChecking no"'
        StopWatch.start(f'Talking to {user}@{worker}')
        exit_code = cls._system(
            f'scp {ignore_setting} {user}@{worker}:/etc/network/interfaces {tmp}',
            exitcode=True)
        StopWatch.stop(f'Talking to {user}@{worker}')
        StopWatch.status(f'Talking to {user}@{worker}', exit_code == 0)

        # Write new interfaces file
        try:
            interfaces = readfile(tmp).rstrip().split('\n')
        except Exception:
            Console.error(f"Could not open {tmp}")
            sys.exit(1)

        try:
            ind = interfaces.index(f'auto {cls.priv_interface}')
        except ValueError:
            Console.error(f"Could not find {cls.priv_interface} configuration in interfaces file")
            # BUG FIX: without exiting here, `ind` below was undefined
            # and the next line raised NameError
            sys.exit(1)

        # keep the 3 lines of the interface stanza and append routing info
        interface_config = [line.lstrip() for line in interfaces[ind: ind + 3]]
        interface_config.append(f'gateway {gateway}')
        interface_config.append(f'netmask {netmask}')
        dnss = " ".join(cls.dns) + "\n"
        interface_config.append(f'dns-nameservers {dnss}')
        new_config = interfaces[:ind] + interface_config
        writefile(tmp, '\n'.join(new_config))

        # New config file now written on local machine. Move to worker in tmp directory
        remote_cmd1 = 'mkdir -p ~/.cloudmesh/tmp'
        remote_path = '~/.cloudmesh/tmp/interface.tmp'
        cls._system(f'ssh {ignore_setting} {user}@{worker} {remote_cmd1}')
        cls._system(f'scp {ignore_setting} {tmp} {user}@{worker}:{remote_path}')
        remote_cmd2 = 'sudo cp ~/.cloudmesh/tmp/interface.tmp /etc/network/interfaces'
        cls._system(f'ssh {ignore_setting} {user}@{worker} {remote_cmd2}')
def create_file(self, location, content):
    """Write *content* to *location*, creating any missing parent
    directories first."""
    target = path_expand(location)
    Path(target).parent.mkdir(parents=True, exist_ok=True)
    writefile(target, content)