import pathlib
import shutil
import subprocess
import sys
from typing import List, Optional

import yaml

# Assumed context: `log`, `generate`, `local_generate`, and the helpers
# referenced below (setup_templates, process_blob, clone_templates,
# version_sort), plus the REQUIRED_CMDS, DEVSITE_SCHEME, and XREFS_DIR_NAME
# constants, are defined elsewhere in this package.


def run(
    args: List,
    *,
    cwd: Optional[str] = None,
    check: bool = True,
    hide_output: bool = True,
    timeout: Optional[int] = None,
) -> subprocess.CompletedProcess:
    if hide_output:
        stdout: Optional[int] = subprocess.PIPE
    else:
        stdout = None
    try:
        return subprocess.run(
            args,
            stdout=stdout,
            stderr=subprocess.STDOUT,
            cwd=cwd,
            check=check,
            encoding="utf-8",
            timeout=timeout,
        )
    except subprocess.CalledProcessError as exc:
        log.error(
            f"Failed executing {' '.join(str(arg) for arg in args)}:\n\n{exc.stdout}"
        )
        raise exc
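# A minimal sketch of calling run(); the command and timeout below are
# hypothetical, chosen only to illustrate the contract:
#
#   result = run(["git", "--version"], timeout=60)
#   print(result.stdout)  # stderr is merged into stdout and decoded as UTF-8
#
# With check=True (the default), a non-zero exit logs the combined output and
# re-raises subprocess.CalledProcessError.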
def build_blobs(client, blobs, credentials):
    num = len(blobs)
    if num == 0:
        log.success("No blobs to process!")
        return

    log.info("Let's build some docs!")
    blobs_str = "\n".join(blob.name for blob in blobs)
    log.info(f"Processing {num} blob{'' if num == 1 else 's'}:\n{blobs_str}")

    # Clone doc-templates.
    templates_dir, devsite_template = setup_templates()

    # Process every blob.
    failures = []
    for i, blob in enumerate(blobs):
        try:
            log.info(f"Processing {i+1} of {num}: {blob.name}...")
            process_blob(blob, credentials, devsite_template)
        except Exception as e:
            # Keep processing the other files if an error occurs.
            log.error(f"Error processing {blob.name}:\n\n{e}")
            failures.append(blob.name)

    shutil.rmtree(templates_dir)

    if len(failures) > 0:
        failure_str = "\n".join(failures)
        raise Exception(
            f"Got errors while processing the following archives:\n{failure_str}"
        )

    log.success("Done!")
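# A hedged sketch of driving build_blobs() with google-cloud-storage; the
# bucket name and prefix are assumptions for illustration, not pipeline
# defaults:
#
#   from google.cloud import storage
#
#   client = storage.Client()
#   blobs = list(client.list_blobs("my-docs-bucket", prefix="docfx-"))
#   build_blobs(client, blobs, credentials)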
def build_one_doc(bucket_name, object_name, credentials):
    verify(credentials)
    try:
        generate.build_one_doc(bucket_name, object_name, credentials)
    except Exception as e:
        log.error(e)
        sys.exit(1)
def build_language_docs(bucket_name, language, credentials):
    verify(credentials)
    try:
        generate.build_language_docs(bucket_name, language, credentials)
    except Exception as e:
        log.error(e)
        sys.exit(1)
def verify(credentials):
    if not credentials:
        # Use implicit string concatenation; passing a tuple to log.error
        # would log the tuple itself rather than the joined message.
        log.error(
            "You need credentials to run this! Specify --credentials on "
            "the command line."
        )
        sys.exit(1)
    for cmd in REQUIRED_CMDS:
        if shutil.which(cmd) is None:
            log.error(f"Could not find {cmd} command!")
            sys.exit(1)
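# verify() is a guard, not a predicate: it returns None when credentials are
# present and every command in REQUIRED_CMDS is on PATH, and otherwise calls
# sys.exit(1) after logging. Callers such as build_one_doc() rely on it
# terminating the process rather than checking a return value.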
def get_xref(xref, bucket, dir):
    if not xref.startswith(DEVSITE_SCHEME):
        return xref
    d_xref = xref[len(DEVSITE_SCHEME):]
    lang, pkg = d_xref.split("/", 1)
    version = "latest"
    if "@" in pkg:
        pkg, version = pkg.rsplit("@", 1)
    if version == "latest":
        # List all blobs, sort by semver, and pick the latest.
        prefix = f"{XREFS_DIR_NAME}/{lang}-{pkg}-"
        blobs = bucket.list_blobs(prefix=prefix)
        versions = []
        for blob in blobs:
            # Be sure to trim the suffix extension.
            version = blob.name[len(prefix):-len(".tar.gz.yml")]
            # Skip strings that aren't valid versions, which happens when
            # another package shares the prefix (e.g. "...foo-1.0.0" vs.
            # "...foo-beta1-1.0.0").
            try:
                version_sort(version)
                versions.append(version)
            except ValueError:
                pass  # Ignore.
        if len(versions) == 0:
            # There are no versions, so there is no latest version.
            log.error(f"Could not find {xref} in gs://{bucket.name}. Skipping.")
            return ""
        versions = sorted(versions, key=version_sort)
        version = versions[-1]
    d_xref = f"{XREFS_DIR_NAME}/{lang}-{pkg}-{version}.tar.gz.yml"
    blob = bucket.blob(d_xref)
    if not blob.exists():
        # Log a warning; the dependency may not be generated yet.
        log.error(f"Could not find gs://{bucket.name}/{d_xref}. Skipping.")
        return ""
    d_xref_path = dir.joinpath(d_xref).absolute()
    d_xref_path.parent.mkdir(parents=True, exist_ok=True)
    blob.download_to_filename(d_xref_path)
    return str(d_xref_path)
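# A sketch of resolving a cross-reference; the scheme value and package name
# below are assumptions for illustration:
#
#   # With DEVSITE_SCHEME == "devsite://", this downloads the xref map for a
#   # pinned version into dir and returns its absolute path:
#   path = get_xref("devsite://python/google-cloud-storage@2.14.0", bucket, dir)
#
#   # Omitting "@<version>" (or passing "@latest") scans the bucket's
#   # "<lang>-<pkg>-<version>.tar.gz.yml" blobs and picks the highest version
#   # according to version_sort().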
def build_blobs(blobs, credentials):
    num = len(blobs)
    if num == 0:
        log.success("No blobs to process!")
        return

    log.info("Let's build some docs!")
    blobs_str = "\n".join(blob.name for blob in blobs)
    log.info(f"Processing {num} blob{'' if num == 1 else 's'}:\n{blobs_str}")

    templates_dir = pathlib.Path("doc-templates")
    if templates_dir.is_dir():
        shutil.rmtree(templates_dir)
    templates_dir.mkdir(parents=True, exist_ok=True)

    log.info(f"Cloning templates into {templates_dir.absolute()}")
    clone_templates(templates_dir)
    log.info(f"Got the templates ({templates_dir.absolute()})!")

    devsite_template = templates_dir.joinpath("third_party/docfx/templates/devsite")

    failures = []
    for blob in blobs:
        try:
            process_blob(blob, credentials, devsite_template)
        except Exception as e:
            # Keep processing the other files if an error occurs.
            log.error(f"Error processing {blob.name}:\n\n{e}")
            failures.append(blob.name)

    shutil.rmtree(templates_dir)

    if len(failures) > 0:
        failure_str = "\n".join(failures)
        raise Exception(
            f"Got errors while processing the following archives:\n{failure_str}"
        )

    log.success("Done!")
def prepare_java_toc(toc_file, product_name):
    with open(toc_file, "r") as yml_input:
        try:
            toc = yaml.safe_load(yml_input)
            # Sort the list of dicts on each dict's "uid" value.
            toc.sort(key=lambda x: x.get("uid"))
            # Include the index.md overview page.
            overview = [{"name": "Overview", "href": "index.md"}]
            toc = overview + toc
            # Include the product-level hierarchy.
            toc = [{"name": product_name, "items": toc}]
            with open(toc_file, "w") as f:
                # Add back the necessary docfx YamlMime comment.
                f.write("### YamlMime:TableOfContent\n")
                yaml.dump(toc, f, default_flow_style=False, sort_keys=False)
        except yaml.YAMLError as e:
            log.error("Error parsing java toc file")
            raise e
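# A sketch of what prepare_java_toc() produces; the uids and product name
# below are hypothetical:
#
#   # Input toc.yml (flat, unsorted):
#   #   - uid: com.example.b
#   #   - uid: com.example.a
#   #
#   # Output after prepare_java_toc("toc.yml", "Cloud Example"):
#   #   ### YamlMime:TableOfContent
#   #   - name: Cloud Example
#   #     items:
#   #     - name: Overview
#   #       href: index.md
#   #     - uid: com.example.a
#   #     - uid: com.example.b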
def build_local_doc(input_path):
    try:
        local_generate.build_local_doc(input_path)
    except Exception as e:
        log.error(e)
        sys.exit(1)
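# Example invocation; the path is hypothetical:
#
#   build_local_doc("path/to/local/docfx-input")
#
# Unlike the bucket-based entry points above, this one skips verify() and
# delegates straight to local_generate.build_local_doc().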