def run_build(options: argparse.Namespace) -> None:
    if os.environ.get('CIRCLE_PULL_REQUEST') is not None:
        logger.info('Pull Request detected. Skipping Build')
        return

    validate_and_parse_inputs(options)

    # Find Builds
    jobs = list(find_build_jobs(
        options.project_path,
        options.collection_names,
        options.category_names,
        options.notebook_names,
        options.force_build))

    # Run Build
    artifact_paths = {}
    if options.build_mode is BuildMode.Single:
        for job in jobs:
            job_context = generate_job_context(job)
            run_job_context(job_context, True)
            for notebook in job_context.notebooks:
                hash_name = f'{notebook.collection_name}-{notebook.category_name}'
                artifact_paths[hash_name] = notebook.artifact.path
    else:
        build_artifacts_concurrently(options, jobs, artifact_paths)

    for name, path in artifact_paths.items():
        logger.info(f'Artifact[{name}] created here: {path}')
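
# Minimal usage sketch (hypothetical): run_build expects an argparse.Namespace
# carrying the attributes read above. The attribute names are taken from the
# function body; the values below are illustrative only, not real defaults.
def _example_run_build() -> None:
    example_options = argparse.Namespace(
        project_path='.',
        collection_names='example-collection',
        category_names=None,
        notebook_names=None,
        force_build=False,
        build_mode=BuildMode.Single,
    )
    run_build(example_options)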
def run_sync_notebooks(options: argparse.Namespace) -> None:
    if not os.path.exists(options.destination_path):
        raise NotImplementedError(f'Destination Path[{options.destination_path}] does not exist')

    validate_and_parse_inputs(options)
    # https://github.com/spacetelescope/dat_pyinthesky/blob/78bfaec05eb9af6280c6d15b6df54886b1aa4e9f/.circleci/builder/factory.py#L59
    for job in find_build_jobs(
            options.project_path,
            options.collection_names,
            options.category_names,
            options.notebook_names,
            True):
        notebooks_to_update = {}
        for notebook in job.category.notebooks:
            new_path = f'{options.destination_path}/{job.category.name}/{notebook.name}.ipynb'
            source_path = f'{options.project_path}/{job.collection.name}/{job.category.name}/{notebook.name}.ipynb'
            key = f'{job.collection.name}.{job.category.name}'
            notebooks_to_update[key] = (source_path, new_path)

        for key, (source_path, new_path) in notebooks_to_update.items():
            collection_name, category_name = key.split('.', 1)
            logger.info(f'Updating: {collection_name} - {category_name}')
            # Derive the directory paths from the entry being processed rather
            # than relying on loop variables left over from the scan above.
            source_dirpath = os.path.dirname(source_path)
            new_dirpath = os.path.dirname(new_path)
            if os.path.exists(new_dirpath):
                shutil.rmtree(new_dirpath)

            shutil.copytree(source_dirpath, new_dirpath)
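
# Path-layout sketch (hypothetical values): given project_path='proj' and
# destination_path='dest', a notebook 'nb' in collection 'coll' / category
# 'cat' is copied from 'proj/coll/cat/nb.ipynb' to 'dest/cat/nb.ipynb' -- the
# collection level is dropped on the destination side, and each category
# directory is replaced wholesale via rmtree + copytree.
def _example_sync_paths() -> None:
    source_path = 'proj/coll/cat/nb.ipynb'
    new_path = 'dest/cat/nb.ipynb'
    assert os.path.dirname(source_path) == 'proj/coll/cat'
    assert os.path.dirname(new_path) == 'dest/cat'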
def run_extract_metadata(options: argparse.Namespace) -> None:
    validate_and_parse_inputs(options)
    for job in find_build_jobs(
            options.project_path,
            options.collection_names,
            options.category_names,
            options.notebook_names):
        job_context = generate_job_context(job)
        for notebook_context in job_context.notebooks:
            extract_metadata(notebook_context)
def run_generate_ci_environment(options: argparse.Namespace) -> None:
    validate_and_parse_inputs(options)
    jobs = list(find_build_jobs(
        options.project_path,
        options.collection_names,
        options.category_names,
        options.notebook_names))
    gen_ci_env(
        jobs,
        options.ci_environment,
        options.project_path,
        options.enable_website_publication,
        options.enable_nightly)
def run_merge_artifacts(options: argparse.Namespace) -> None:
    validate_and_parse_inputs(options)
    command_context = CICommandContext(
        options.project_path,
        options.collection_names,
        options.category_names,
        options.notebook_names,
        options.ci_mode)
    merge_context = generate_merge_context(options.project_path, options.org, options.repo_name)
    run_artifact_merge(command_context, merge_context)
def test__extract_metadata__interface(metadata_rich_notebooks):  # noqa F811
    import json
    import os

    from nbcollection.ci.constants import ENCODING, SCANNER_BUILD_DIR
    from nbcollection.ci.scanner.utils import find_build_jobs, generate_job_context
    from nbcollection.ci.metadata.factory import run_extract_metadata
    from nbcollection.ci.metadata.utils import extract_metadata
    from nbcollection.ci.commands.utils import validate_and_parse_inputs
    from nbcollection_tests.ci.tools.utils import collection_set_to_namespace

    metadata_keys = ['title', 'description']
    notebook_name = 'Notebook-One'
    options = collection_set_to_namespace(metadata_rich_notebooks, extra={
        'notebook_names': notebook_name,
    })
    run_extract_metadata(options)
    for job_idx, job in enumerate(find_build_jobs(
            options.project_path,
            options.collection_names,
            options.category_names,
            options.notebook_names)):
        for notebook in job.category.notebooks:
            extract_metadata(notebook)
            with open(notebook.metadata.path, 'rb') as stream:
                metadata = json.loads(stream.read().decode(ENCODING))

            for key in metadata_keys:
                assert key in metadata.keys()

    assert job_idx == 0

    validative_options = collection_set_to_namespace(metadata_rich_notebooks, extra={
        'notebook_names': notebook_name,
    })
    validate_and_parse_inputs(validative_options)
    # Iterate with the freshly validated options rather than the earlier namespace.
    for job_idx, job in enumerate(find_build_jobs(
            validative_options.project_path,
            validative_options.collection_names,
            validative_options.category_names,
            validative_options.notebook_names)):
        job_context = generate_job_context(job)
        for notebook_idx, notebook_context in enumerate(job_context.notebooks):
            extract_metadata(notebook_context)

        assert notebook_idx == 0
        validative_metadata_filepath = os.path.join(
            SCANNER_BUILD_DIR,
            job.semantic_path(),
            f'{notebook.name}.metadata.json')
        with open(validative_metadata_filepath, 'rb') as stream:
            validative_metadata = json.loads(stream.read().decode(ENCODING))

        for key in metadata_keys:
            assert validative_metadata[key] == metadata[key]

    assert job_idx == 0
def run_reset_notebook_execution(options: argparse.Namespace) -> None:
    validate_and_parse_inputs(options)
    for job in find_build_jobs(options.project_path, options.collection_names, options.category_names):
        for notebook in job.category.notebooks:
            with open(notebook.path, 'rb') as stream:
                notebook_data = json.loads(stream.read().decode(ENCODING))

            reset_notebook_execution(notebook_data)
            with open(notebook.path, 'wb') as stream:
                stream.write(json.dumps(notebook_data).encode(ENCODING))
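
# Sketch of what reset_notebook_execution presumably does (the helper itself is
# defined elsewhere): clear execution counts and outputs from the parsed
# nbformat JSON so the notebook round-trips as never executed. This is an
# assumption about its behavior, not the actual implementation.
def _example_reset_notebook_execution(notebook_data: dict) -> None:
    for cell in notebook_data.get('cells', []):
        if cell.get('cell_type') == 'code':
            cell['execution_count'] = None
            cell['outputs'] = []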
def convert(options=None):
    options = options or sys.argv
    parser = argparse.ArgumentParser(
        prog='nbcollection-ci merge-artifacts',
        description=DESCRIPTION,
        epilog=EXAMPLE_USAGE,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--collection-names', required=False, default=None,
                        help='Select a subset of Collections to be built, or all will be built')
    parser.add_argument('-t', '--category-names', required=False, default=None,
                        help='Select a subset of Categories to be built, or all will be built')
    parser.add_argument('-n', '--notebook-names', required=False, default=None,
                        help='Select a subset of Notebooks to be built, or all will be built')
    parser.add_argument('-p', '--project-path', default=PROJECT_DIR, type=str,
                        help='Path relative to Project DIR install')
    parser.add_argument('-b', '--publish-branch', type=str, default='offline-artifacts')
    parser.add_argument('-r', '--publish-remote', type=str, default='origin')
    parser.add_argument('-s', '--site', type=Site, default=Site.GithubPages)
    parser.add_argument('-a', '--artifact-storage-directory', type=str, default='artifacts')
    options = parser.parse_args(options[2:])
    validate_and_parse_inputs(options)
    run_sync(options)
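
# Example invocation (hypothetical flag values): convert() slices away the
# first two argv entries (program name and subcommand) before parsing, so a
# hand-built argv must include those two leading tokens ahead of the flags.
def _example_convert_cli() -> None:
    convert(['nbcollection-ci', 'merge-artifacts', '-c', 'my-collection', '-b', 'offline-artifacts'])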