Example #1
from os import path

# Assumed imports/helpers: the module names follow the lambda-uploader
# package, and _print stands in for the module's console-output helper.
from lambda_uploader import config, package, uploader


def _print(msg):
    print(msg)


def _execute(args):
    pth = path.abspath(args.function_dir)

    cfg = config.Config(pth,
                        args.config,
                        role=args.role,
                        variables=args.variables)

    if args.s3_bucket:
        cfg.set_s3(args.s3_bucket, args.s3_key)

    if args.no_virtualenv:
        # flag set to omit the virtualenv entirely
        venv = False
    elif args.virtualenv:
        # a custom virtualenv was specified
        venv = args.virtualenv
    else:
        # build and include a virtualenv (the default)
        venv = None

    if args.no_build:
        pkg = package.create_package(pth)
    else:
        _print('Building Package')
        requirements = cfg.requirements
        if args.requirements:
            requirements = path.abspath(args.requirements)
        extra_files = cfg.extra_files
        if args.extra_files:
            extra_files = args.extra_files
        pkg = package.build_package(pth, requirements, venv, cfg.ignore,
                                    extra_files)

    if not args.no_clean:
        pkg.clean_workspace()

    if not args.no_upload:
        # Set publish if flagged to do so
        if args.publish:
            cfg.set_publish()

        create_alias = False
        # Set alias if the arg is passed
        if args.alias is not None:
            cfg.set_alias(args.alias, args.alias_description)
            create_alias = True

        _print('Uploading Package')
        upldr = uploader.PackageUploader(cfg, args.profile)
        upldr.upload(pkg)
        # If an alias was set, create it
        if create_alias:
            upldr.alias()

        pkg.clean_zipfile()

    _print('Fin')
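
The attributes read off args above imply a fairly rich command line. A minimal argparse sketch that could drive _execute() (flag names are inferred from those attribute accesses, not taken from the real CLI):

from argparse import ArgumentParser

parser = ArgumentParser(description='Build and upload an AWS Lambda package')
parser.add_argument('function_dir', nargs='?', default='.')
parser.add_argument('--config', default='lambda.json')
parser.add_argument('--role')
parser.add_argument('--variables')
parser.add_argument('--s3-bucket')
parser.add_argument('--s3-key')
parser.add_argument('--virtualenv')
parser.add_argument('--requirements')
parser.add_argument('--extra-files', nargs='*')
parser.add_argument('--alias')
parser.add_argument('--alias-description')
parser.add_argument('--profile')
# boolean switches; argparse maps --no-build to args.no_build, and so on
for flag in ('--no-virtualenv', '--no-build', '--no-clean',
             '--no-upload', '--publish'):
    parser.add_argument(flag, action='store_true')

_execute(parser.parse_args())  # e.g. myfunc/ --s3-bucket mybucket --publish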
Example #2
def upload_lambda(self, pkg, upldr_config):
    # Note: this is a method; LOG and self.account_id come from the
    # enclosing module and class.
    LOG.warning(
        "Uploading Lambda %s to AWS Account %s "
        "for region %s ...", upldr_config.name, self.account_id,
        upldr_config.region)
    # profile=None falls back to the default AWS credential chain
    upldr = uploader.PackageUploader(upldr_config, None)
    upldr.upload(pkg)
    upldr.alias()
    pkg.clean_zipfile()
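
For context, a hypothetical host class for that method (the class name and constructor are assumptions inferred from the attributes the method uses, not from the original source):

import logging

from lambda_uploader import uploader

LOG = logging.getLogger(__name__)

class LambdaDeployer(object):
    def __init__(self, account_id):
        self.account_id = account_id

    # upload_lambda(self, pkg, upldr_config) from Example #2 goes here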
Example #3
from os import path
from platform import python_version

import boto3

from lambda_uploader import config, uploader

# EX_CONFIG is defined elsewhere in the test module; it points at the
# directory holding the example lambda.json used by these tests.


# NOTE: as written, the boto3 calls below would hit real AWS; a test like
# this normally runs under an S3 mock such as moto's @mock_s3 (an
# assumption -- the scraper may have dropped the decorator).
def test_s3_upload():
    mock_bucket = 'mybucket'
    conn = boto3.resource('s3')
    conn.create_bucket(Bucket=mock_bucket)

    conf = config.Config(path.dirname(__file__),
                         config_file=path.join(EX_CONFIG, 'lambda.json'))
    conf.set_s3(mock_bucket)
    upldr = uploader.PackageUploader(conf, None)

    # 'dummyfile' is a fixture file expected to sit next to this test
    upldr._upload_s3(path.join(path.dirname(__file__), 'dummyfile'))

    # Fetch the contents back out to be sure we really uploaded the dummyfile
    retrieved_bucket = conn.Object(
        mock_bucket,
        conf.s3_package_name()
        ).get()['Body']
    found_contents = str(retrieved_bucket.read()).rstrip()
    # read() returns bytes; on Python 3, str(bytes) yields the "b'...'" repr,
    # hence the two assertions (the version check is a plain string compare)
    if python_version() < '3.0.0':
        assert found_contents == 'dummy data'
    else:
        assert found_contents == "b'dummy data\\n'"
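
A minimal demonstration of the mocking pattern the test above relies on (the decorator placement is an assumption; on moto 5+ the equivalent entry point is mock_aws):

import boto3
from moto import mock_s3

@mock_s3
def demo():
    conn = boto3.resource('s3', region_name='us-east-1')
    conn.create_bucket(Bucket='mybucket')  # exists only in moto's fake S3
    conn.Object('mybucket', 'dummyfile').put(Body=b'dummy data\n')
    body = conn.Object('mybucket', 'dummyfile').get()['Body'].read()
    assert body == b'dummy data\n'

demo()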