Code example #1
def main(argv):
    parser = ArgumentParser(
        description='Prepare the Terraform config generated by `chalice package '
        '--pkg-format terraform` and copy it into the terraform/ '
        'directory.')
    parser.add_argument('lambda_name',
                        help='the lambda of the config that will be '
                        'transformed and copied')
    options = parser.parse_args(argv)
    source_dir = Path(
        config.project_root
    ) / 'lambdas' / options.lambda_name / '.chalice' / 'terraform'
    output_dir = Path(config.project_root) / 'terraform' / options.lambda_name
    output_dir.mkdir(exist_ok=True)

    deployment_src = source_dir / 'deployment.zip'
    deployment_dst = output_dir / 'deployment.zip'
    log.info('Copying %s to %s', deployment_src, deployment_dst)
    shutil.copyfile(deployment_src, deployment_dst)

    tf_src = source_dir / 'chalice.tf.json'
    tf_dst = output_dir / 'chalice.tf.json'
    log.info('Transforming %s to %s', tf_src, tf_dst)
    with open(tf_src, 'r') as f:
        output_json = json.load(f)
    output_json = transform_tf(output_json)
    with write_file_atomically(tf_dst) as f:
        json.dump(output_json, f, indent=4)
Code example #2
File: can_bundle.py Project: DataBiosphere/azul
def save_bundle(bundle: Bundle, output_dir: str) -> None:
    for obj, suffix in [(bundle.manifest, '.manifest.json'),
                        (bundle.metadata_files, '.metadata.json')]:
        path = os.path.join(output_dir, bundle.uuid + suffix)
        with write_file_atomically(path) as f:
            json.dump(obj, f, indent=4)
        logger.info('Successfully wrote %s', path)
Code example #3
def main():
    catalogs = {
        'dcp2':
        config.Catalog(name='dcp2',
                       atlas='hca',
                       internal=False,
                       plugins=dict(
                           metadata=config.Catalog.Plugin(name='hca'),
                           repository=config.Catalog.Plugin(name='tdr')),
                       sources=set())
    }

    # To create a normalized OpenAPI document, we patch any
    # deployment-specific variables that affect the document.
    with patch.object(target=type(config),
                      attribute='catalogs',
                      new_callable=PropertyMock,
                      return_value=catalogs):
        assert config.catalogs == catalogs
        with patch.object(target=config,
                          attribute='service_function_name',
                          return_value='azul_service'):
            assert config.service_name == 'azul_service'
            with patch.object(target=config,
                              attribute='service_endpoint',
                              return_value='localhost'):
                assert config.service_endpoint() == 'localhost'
                app_module = load_app_module('service')
                app_spec = app_module.app.spec()
                doc_path = os.path.join(config.project_root,
                                        'lambdas/service/openapi.json')
                with write_file_atomically(doc_path) as file:
                    json.dump(app_spec, file, indent=4)
Code example #4
def main(argv):
    parser = ArgumentParser(
        description='Prepare the Terraform config generated by '
        '`chalice package --pkg-format terraform` '
        'and copy it into the terraform/ directory.')
    parser.add_argument('lambda_name',
                        help='The name of the Lambda function to prepare.')
    options = parser.parse_args(argv)
    lambda_name = options.lambda_name
    src_dir = chalice.package_dir(lambda_name)
    dst_dir = chalice.module_dir(lambda_name)
    dst_dir.mkdir(exist_ok=True)

    args = [dir / chalice.package_zip_name for dir in (src_dir, dst_dir)]
    log.info('Copying %s to %s', *args)
    shutil.copyfile(*args)

    src_tf, dst_tf = [
        dir / chalice.tf_config_name for dir in (src_dir, dst_dir)
    ]
    log.info('Transforming %s to %s', src_tf, dst_tf)
    with open(src_tf) as f:
        tf_config = json.load(f)
    tf_config = populate_tags(
        chalice.patch_resource_names(transform_tf(tf_config)))
    with write_file_atomically(dst_tf) as f:
        json.dump(tf_config, f, indent=4)
Code example #5
File: queues.py Project: DataBiosphere/azul
def _dump_messages(self, messages, queue_url, path):
    messages = [self._condense(message) for message in messages]
    with write_file_atomically(path) as file:
        content = {
            'queue': queue_url,
            'messages': messages
        }
        json.dump(content, file, indent=4)
    logger.info('Wrote %i messages', len(messages))
Code example #6
File: changelog.py Project: NoopDog/azul
def write_changes(output_dir_path):
    """
    Write the change log as a Python literal to a module in the given directory. We're using Python syntax because it
    can be looked up and loaded very easily. See changes().
    """
    with write_file_atomically(
            os.path.join(output_dir_path, module_name + '.py')) as f:
        # Write each change as a single line. I tried pprint() but it reorders the keys in dictionaries and its line
        # wrapping algorithm is creating a non-uniform output.
        f.write(variable_name + ' = [\n')
        for change in changelog()[variable_name]:
            f.write('    ' + repr(change) + ',\n')
        f.write(']\n')
Code example #7
def write(output_path: Path, variables: List[Variable], comments: List[str]):
    with write_file_atomically(output_path) as output:
        output.write(
            dedent('''
            from typing import Optional, Mapping


            def env() -> Mapping[str, Optional[str]]:
                """
                Returns a dictionary that maps environment variable names to values. The
                values are either None or strings. String values can contain references to
                other environment variables in the form `{FOO}` where FOO is the name of an
                environment variable. See

                https://docs.python.org/3.8/library/string.html#format-string-syntax

                for the concrete syntax. These references will be resolved *after* the
                overall environment has been compiled by merging all relevant
                `environment.py` and `environment.local.py` files.

                Entries with a `None` value will be excluded from the environment. They
                can be used to document a variable without a default value in which case
                other, more specific `environment.py` or `environment.local.py` files must
                provide the value.
                """
                return {
        '''[1:]))
        indent = '    '

        for variable in variables:
            for comment in variable.comments:
                output.write(indent * 2 + comment)
            output.write(
                f"{indent * 2}'{variable.name}': {convert_value(variable.value)},\n"
            )
        for comment in comments:
            output.write(indent * 2 + comment)
        output.write(indent + '}\n')
Code example #8
File: can_bundle.py Project: NoopDog/azul
def main(argv):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--dss-url',
        '-u',
        default=config.dss_endpoint,
        help=
        'The URL of the DSS REST API endpoint from which to download the bundle to be canned '
        '(default: %(default)s).')
    parser.add_argument(
        '--replica',
        '-r',
        default='aws',
        help=
        "The replica from which to donwload the bundle to be canned (default: %(default)s)."
    )
    parser.add_argument('--uuid',
                        '-b',
                        required=True,
                        help='The UUID of the bundle to can.')
    parser.add_argument(
        '--version',
        '-v',
        help='The version of the bundle to can (default: the latest version).'
    )
    parser.add_argument(
        '--output-dir',
        '-O',
        default=os.path.join(config.project_root, 'test', 'indexer', 'data'),
        help='The path to the output directory (default: %(default)s).')
    parser.add_argument(
        '--api-json',
        '-A',
        default=False,
        action='store_true',
        help=
        "Dump the return value of metadata-api's as_json function (default off)."
    )
    args = parser.parse_args(argv)

    dss_client = azul.dss.direct_access_client(
        dss_endpoint=args.dss_url, num_workers=config.num_dss_workers)
    version, manifest, metadata_files = download_bundle_metadata(
        client=dss_client,
        replica=args.replica,
        uuid=args.uuid,
        version=args.version,
        num_workers=config.num_dss_workers)
    logger.info('Downloaded bundle %s version %s from replica %s.', args.uuid,
                version, args.replica)

    api_json = as_json(Bundle(args.uuid, version, manifest,
                              metadata_files)) if args.api_json else None

    for obj, suffix in [(manifest, ".manifest.json"),
                        (metadata_files, '.metadata.json'),
                        *([(api_json, ".api.json")] if api_json else [])]:
        path = os.path.join(args.output_dir, args.uuid + suffix)
        with write_file_atomically(path) as f:
            json.dump(obj, f, indent=4)
        logger.info("Successfully wrote %s", path)
Code example #9
def main(argv):
    """
    Load a canned bundle from DCP/1 and write *.manifest.tdr and *.metadata.tdr
    files showing the desired output for DCP/2.
    """
    default_version = datetime(year=2021, month=1, day=17, hour=0)

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--bundle-uuid',
                        '-b',
                        default=TestTDRPlugin.bundle_uuid,
                        help='The UUID of the existing DCP/1 canned bundle.')
    parser.add_argument(
        '--source-id',
        '-s',
        default=TestTDRPlugin.snapshot_id,
        help=
        'The UUID of the snapshot/dataset to contain the canned DCP/2 bundle.')
    parser.add_argument(
        '--version',
        '-v',
        default=tdr.Plugin.format_version(default_version),
        help='The version for any mock entities synthesized by the script.')
    parser.add_argument('--input-dir',
                        '-I',
                        default=os.path.join(config.project_root, 'test',
                                             'indexer', 'data'),
                        help='The path to the input directory.')
    parser.add_argument(
        '--mock-supplementary-files',
        '-S',
        type=int,
        default=0,
        help='The number of mock supplementary files to add to the output.')
    args = parser.parse_args(argv)

    paths = file_paths(args.input_dir, args.bundle_uuid)

    log.debug('Reading canned bundle %r from %r', args.bundle_uuid,
              paths['dss'])
    with open(paths['dss']['manifest']) as f:
        manifest = json.load(f)
    with open(paths['dss']['metadata']) as f:
        metadata = json.load(f)

    dss_source = DSSSourceRef(id='',
                              spec=SimpleSourceSpec(
                                  prefix=Prefix.of_everything,
                                  name=config.dss_endpoint))
    dss_bundle = DSSBundle(fqid=SourcedBundleFQID(source=dss_source,
                                                  uuid=args.bundle_uuid,
                                                  version=''),
                           manifest=manifest,
                           metadata_files=metadata)

    tdr_source = TDRSourceRef(id=args.source_id,
                              spec=TDRSourceSpec(prefix=Prefix.of_everything,
                                                 project='test_project',
                                                 name='test_name',
                                                 is_snapshot=True))
    tdr_bundle = dss_bundle_to_tdr(dss_bundle, tdr_source)

    add_supp_files(tdr_bundle,
                   num_files=args.mock_supplementary_files,
                   version=args.version)

    log.debug('Writing converted bundle %r to %r', args.bundle_uuid,
              paths['tdr'])
    with write_file_atomically(paths['tdr']['result']) as f:
        json.dump(
            {
                'manifest': tdr_bundle.manifest,
                'metadata': tdr_bundle.metadata_files
            },
            f,
            indent=4)

    with write_file_atomically(paths['tdr']['tables']) as f:
        json.dump(dump_tables(tdr_bundle), f, indent=4)
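
All of the examples above funnel their output through write_file_atomically so that readers never observe a partially written file. Below is a minimal sketch of such a helper, assuming the common pattern of writing to a temporary file in the destination directory and then renaming it over the target; the name, signature, and body shown here are illustrative, not azul's actual implementation.

import os
import tempfile
from contextlib import contextmanager


@contextmanager
def write_file_atomically(path, mode=0o644):
    # Hypothetical sketch: create a temporary file next to the destination,
    # let the caller write to it, then atomically replace the destination.
    dir_name, base_name = os.path.split(path)
    fd, temp_path = tempfile.mkstemp(prefix=base_name + '.', dir=dir_name or '.')
    try:
        with os.fdopen(fd, 'w') as f:
            yield f
        os.chmod(temp_path, mode)
        os.replace(temp_path, path)  # atomic rename on POSIX file systems
    except BaseException:
        os.unlink(temp_path)
        raise

Used this way, a json.dump(obj, f, indent=4) inside the with block either produces the complete file or leaves any previous contents of path untouched.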