Code example #1
Score: 0
def build_and_stage_runtime(runtime_name, runtime_config):
    """Build a conda runtime on the build host and stage it to S3.

    Creates the runtime described by ``runtime_config`` remotely,
    captures its installed-package metadata (pip freeze and the conda
    root environment), packages the runtime as a tarball, and uploads
    the metadata JSON to the staged S3 location.

    Args:
        runtime_name: Name of the runtime; used to derive staging URLs.
        runtime_config: Dict with keys ``'pythonver'``,
            ``'conda_install'``, ``'pip_install'`` and ``'pip_upgrade'``.
    """
    python_ver = runtime_config['pythonver']
    conda_install = runtime_config['conda_install']
    pip_install = runtime_config['pip_install']
    pip_upgrade = runtime_config['pip_upgrade']
    execute(create_runtime, python_ver, conda_install, pip_install,
            pip_upgrade)
    execute(shrink_conda, CONDA_INSTALL_DIR)

    # execute() returns a {host: result} mapping; we assume a single
    # build host and take the first value.  list(...) keeps this working
    # on Python 3, where dict views are not subscriptable.
    # TODO: handle multiple build hosts properly.
    freeze_str = execute(get_runtime_pip_freeze, CONDA_INSTALL_DIR)
    freeze_str_single = list(freeze_str.values())[0]

    freeze_pkgs = format_freeze_str(freeze_str_single)

    conda_env_yaml = execute(get_conda_root_env, CONDA_INSTALL_DIR)
    conda_env_yaml_single = list(conda_env_yaml.values())[0]
    # Context manager so the debug dump handle is closed deterministically
    # (the previous open() was never closed).
    with open("debug.pickle", 'w') as debug_file:
        pickle.dump(conda_env_yaml_single, debug_file)
    # safe_load: plain yaml.load allows arbitrary object construction and
    # is deprecated without an explicit Loader; the env YAML needs only
    # plain mappings/scalars.
    conda_env = yaml.safe_load(conda_env_yaml_single)
    runtime_dict = {
        'python_ver': python_ver,
        'conda_install': conda_install,
        'pip_install': pip_install,
        'pip_upgrade': pip_upgrade,
        'pkg_ver_list': freeze_pkgs,
        'conda_env_config': conda_env
    }

    runtime_tar_gz, runtime_meta_json = runtimes.get_staged_runtime_url(
        runtime_name, python_ver)

    execute(package_all, runtime_tar_gz)
    with open('runtime.meta.json', 'w') as outfile:
        json.dump(runtime_dict, outfile)

    local("aws s3 cp runtime.meta.json {}".format(runtime_meta_json))
Code example #2
Score: 0
File: fabfile_builder.py  Project: depsched/sim
def deploy_runtime(runtime_name, python_ver):
    """Promote one staged runtime to production.

    Copies both the runtime tarball and its metadata JSON from their
    staging S3 URLs to the corresponding production S3 URLs.
    """
    staged_tar, staged_meta = runtimes.get_staged_runtime_url(
        runtime_name, python_ver)
    prod_tar, prod_meta = runtimes.get_runtime_url(
        runtime_name, python_ver)

    # Tarball first, then metadata — same order as before.
    for src_url, dst_url in ((staged_tar, prod_tar),
                             (staged_meta, prod_meta)):
        local("aws s3 cp {} {}".format(src_url, dst_url))
Code example #3
Score: 0
File: fabfile_builder.py  Project: depsched/sim
def deploy_runtimes(num_shards=10):
    """Promote every staged runtime to production, optionally sharded.

    For each runtime/python-version pair, copies the staged tarball to
    its production (base) URL.  When ``num_shards`` > 1, the tarball is
    additionally copied to ``num_shards`` hashed shard keys and the
    metadata JSON is rewritten with the shard URL list before upload;
    otherwise the staged metadata JSON is copied through unchanged.

    Args:
        num_shards: Number of tarball shard copies to create.  May be a
            string (fabric passes CLI task args as strings), hence the
            int() coercion.
    """
    num_shards = int(num_shards)
    for runtime_name, rc in runtimes.RUNTIMES.items():
        for python_ver in rc['pythonvers']:
            staging_runtime_tar_gz, staging_runtime_meta_json \
                = runtimes.get_staged_runtime_url(runtime_name, python_ver)

            # Always upload to the base tar gz url.
            base_tar_gz = runtimes.get_runtime_url_from_staging(
                staging_runtime_tar_gz)
            local("aws s3 cp {} {}".format(staging_runtime_tar_gz,
                                           base_tar_gz))

            runtime_meta_json_url = runtimes.get_runtime_url_from_staging(
                staging_runtime_meta_json)
            # If required, generate the shard urls and update metadata
            if num_shards > 1:
                local("aws s3 cp {} runtime.meta.json".format(
                    staging_runtime_meta_json))
                # with-block closes the handle (the original leaked it).
                with open('runtime.meta.json', 'r') as infile:
                    meta_dict = json.load(infile)

                # The bucket/key split depends only on base_tar_gz, so
                # hoist it out of the per-shard loop.
                bucket_name, key = runtimes.split_s3_url(base_tar_gz)
                shard_urls = []
                # range (not xrange) is portable across Python 2 and 3
                # and behaves identically here.
                for shard_id in range(num_shards):
                    shard_key = runtimes.get_s3_shard(key, shard_id)
                    hash_s3_key = runtimes.hash_s3_key(shard_key)
                    shard_url = "s3://{}/{}".format(bucket_name, hash_s3_key)
                    local("aws s3 cp {} {}".format(base_tar_gz, shard_url))
                    shard_urls.append(shard_url)

                meta_dict['urls'] = shard_urls
                with open('runtime.meta.json', 'w') as outfile:
                    json.dump(meta_dict, outfile)
                local("aws s3 cp runtime.meta.json {}".format(
                    runtime_meta_json_url))
            else:
                local("aws s3 cp {} {}".format(staging_runtime_meta_json,
                                               runtime_meta_json_url))
Code example #4
Score: 0
for runtime_name, runtime_config in runtimes.RUNTIMES.items():
    python_versions = runtime_config['pythonvers']
    for python_ver in python_versions:
        python_ver_major, python_ver_minor = python_ver.split(".")
        python_ver_major = int(python_ver_major)
        python_ver_minor = int(python_ver_minor)
        packages = runtime_config['packages']
        # skip all of the ones not for this version of python. Someday we should
        # be smarter about this
        if sys.version_info.major == python_ver_major \
           and sys.version_info.minor == python_ver_minor:
            print("running runtime config {}".format(runtime_name))

            # create an executor
            config = pywren.wrenconfig.default()
            staged_runtime_url, staged_meta_url = runtimes.get_staged_runtime_url(
                runtime_name, python_ver)
            assert staged_runtime_url[:5] == "s3://"
            splits = staged_runtime_url[5:].split("/")
            bucket = splits[0]
            key = "/".join(splits[1:])
            config['runtime']['bucket'] = bucket
            config['runtime']['s3_key'] = key
            print("running with bucket={} key={}".format(bucket, key))
            wrenexec = pywren.lambda_executor(config)

            def import_check(x):
                results = {}

                conda_results = {}
                for pkg in packages['conda_install']:
                    if pkg in runtimes.CONDA_TEST_STRS: