def build_packages(python_packages, bucket):
    """Build, validate, install, and cache wheels for the given python packages.

    For each package (in dependency build order): download its source from S3,
    build wheels with pip, reject any wheel whose version conflicts with a
    version cortex pins, install it from the local wheelhouse, and finally
    upload the built wheelhouse back to S3 for reuse.

    Args:
        python_packages: mapping of package name -> metadata dict containing
            "src_key" and "package_key" S3 keys (schema assumed from usage —
            confirm against the context builder).
        bucket: S3 bucket name used for all downloads and uploads.

    Raises:
        UserException: if wheel creation fails, a built wheel conflicts with a
            cortex-pinned version, or installation fails.
    """
    # pip requirement argument to use for each package when building/installing
    pip_args = {}
    ordered_names = get_build_order(python_packages)

    for name in ordered_names:
        pkg = python_packages[name]
        if name != "requirements.txt":
            aws.download_and_extract_zip(pkg["src_key"], LOCAL_PACKAGE_PATH, bucket)
            pip_args[name] = os.path.join(LOCAL_PACKAGE_PATH, name)
            continue
        # requirements.txt is passed to pip via "-r" rather than as a path
        req_path = os.path.join(LOCAL_PACKAGE_PATH, name)
        aws.download_file_from_s3(pkg["src_key"], req_path, bucket)
        pip_args[name] = "-r " + req_path

    logger.info("Setting up packages")

    restricted_packages = get_restricted_packages()

    for name in ordered_names:
        wheel_dir = os.path.join(WHEELHOUSE_PATH, name)
        logger.info("Building: {}".format(name))
        proc = run("pip3 wheel -w {} {}".format(wheel_dir, pip_args[name]).split())
        if proc.returncode != 0:
            raise UserException("creating wheels", name)

        # Every built wheel (including transitive dependencies) must not
        # conflict with a version cortex pins. Wheel filenames look like
        # {dist}-{version}-... so the first two "-" fields identify it.
        for wheel_file in os.listdir(wheel_dir):
            parts = wheel_file.split("-")
            dist_name, version = parts[0], parts[1]
            pinned = restricted_packages.get(dist_name, None)
            if pinned is not None and version != pinned:
                raise UserException(
                    "when installing {}, found {}=={} but cortex requires {}=={}"
                    .format(name, dist_name, version, dist_name, pinned))

    logger.info("Validating packages")

    # Installing from the local wheelhouse (offline) verifies the wheels work.
    for name in ordered_names:
        logger.info("Installing: {}".format(name))
        proc = run(
            "pip3 install --no-index --find-links={} {}".format(
                os.path.join(WHEELHOUSE_PATH, name), pip_args[name]).split())
        if proc.returncode != 0:
            raise UserException("installing package", name)

    logger.info("Caching built packages")

    # Upload each wheelhouse so later workloads can install without rebuilding.
    for name in ordered_names:
        aws.compress_zip_and_upload(
            os.path.join(WHEELHOUSE_PATH, name),
            python_packages[name]["package_key"],
            bucket,
        )
def install_packages(python_packages, bucket):
    """Install previously-built python packages from their cached S3 wheelhouses.

    Downloads each package's wheelhouse from S3, installs every package offline
    with pip (no index, no cache), then removes the downloaded artifacts.

    Args:
        python_packages: mapping of package name -> metadata dict containing
            "package_key" (and "src_key" for requirements.txt) S3 keys —
            schema assumed from usage; confirm against the context builder.
        bucket: S3 bucket name to download from.

    Raises:
        UserException: if pip fails to install any package.
    """
    ordered_names = get_build_order(python_packages)

    # Fetch each package's cached wheelhouse from S3.
    for name in ordered_names:
        aws.download_and_extract_zip(
            python_packages[name]["package_key"],
            os.path.join(WHEELHOUSE_PATH, name),
            bucket,
        )

    # requirements.txt is stored as a plain file, not a wheelhouse zip.
    if "requirements.txt" in python_packages:
        aws.download_file_from_s3(
            python_packages["requirements.txt"]["src_key"],
            "/requirements.txt",
            bucket,
        )

    for name in ordered_names:
        target = "-r /requirements.txt" if name == "requirements.txt" else name
        proc = run(
            "pip3 install --no-cache-dir --no-index --find-links={} {}".format(
                os.path.join(WHEELHOUSE_PATH, name), target).split())
        if proc.returncode != 0:
            raise UserException("installing package", name)

    # Clean up the downloaded artifacts once everything is installed.
    util.rm_file("/requirements.txt")
    util.rm_dir(WHEELHOUSE_PATH)
def start(args):
    """Initialize the API serving process and block serving predictions.

    Loads the workload context, installs the app's python packages, downloads
    the model, caches transformer implementations and transform args, connects
    to the local TensorFlow Serving instance, waits for it to come up, and
    finally starts the HTTP server.

    Args:
        args: parsed CLI arguments; reads .context, .cache_dir, .workload_id,
            .api, .model_dir, .tf_serve_port, and .port.
    """
    ctx = Context(s3_path=args.context, cache_dir=args.cache_dir,
                  workload_id=args.workload_id)

    # Install this deployment's python packages before anything imports them.
    package.install_packages(ctx.python_packages, ctx.bucket)

    api = ctx.apis_id_map[args.api]
    model = ctx.models[api["model_name"]]
    tf_lib.set_logging_verbosity(ctx.environment["log_level"]["tensorflow"])

    # Request handlers read these from the process-wide cache.
    local_cache["ctx"] = ctx
    local_cache["api"] = api
    local_cache["model"] = model

    # Skip the download if the model directory already exists (e.g. a restart
    # reusing a warm volume — assumption; confirm against deployment setup).
    if not os.path.isdir(args.model_dir):
        aws.download_and_extract_zip(model["key"], args.model_dir, ctx.bucket)

    for column_name in model["feature_columns"] + [model["target_column"]]:
        if ctx.is_transformed_column(column_name):
            trans_impl, _ = ctx.get_transformer_impl(column_name)
            local_cache["trans_impls"][column_name] = trans_impl
            transformed_column = ctx.transformed_columns[column_name]
            input_args_schema = transformed_column["inputs"]["args"]
            # cache aggregates and constants in memory
            if input_args_schema is not None:
                local_cache["transform_args_cache"][
                    column_name] = ctx.populate_args(input_args_schema)

    # gRPC stub for the local TensorFlow Serving instance.
    channel = implementations.insecure_channel("localhost", args.tf_serve_port)
    local_cache[
        "stub"] = prediction_service_pb2.beta_create_PredictionService_stub(
            channel)

    local_cache["required_inputs"] = tf_lib.get_base_input_columns(
        model["name"], ctx)

    # wait a bit for tf serving to start before querying metadata
    # (retry once per second for up to `limit` seconds; exit on final failure)
    limit = 600
    for i in range(limit):
        try:
            local_cache["metadata"] = run_get_model_metadata()
            break
        except Exception as e:
            if i == limit - 1:
                logger.exception(
                    "An error occurred, see `cx logs api {}` for more details."
                    .format(api["name"]))
                sys.exit(1)
        time.sleep(1)

    logger.info("Serving model: {}".format(model["name"]))
    serve(app, listen="*:{}".format(args.port))
def build_packages(python_packages, bucket):
    """Build, validate, install, and cache wheels for the given python packages.

    For each package (in dependency build order): download its source from S3,
    build wheels with pip, reject any built wheel whose version conflicts with
    a version cortex pins, install it from the local wheelhouse, and upload the
    built wheelhouse back to S3 for reuse.

    Args:
        python_packages: mapping of package name -> metadata dict containing
            "src_key" and "package_key" S3 keys (schema assumed from usage —
            confirm against the context builder).
        bucket: S3 bucket name used for all downloads and uploads.

    Raises:
        UserException: if wheel creation fails, a built wheel conflicts with a
            cortex-pinned version, or installation fails.
    """
    cmd_partial = {}
    build_order = get_build_order(python_packages)
    for package_name in build_order:
        python_package = python_packages[package_name]
        if package_name == "requirements.txt":
            # requirements.txt is passed to pip via "-r" rather than as a path
            requirements_path = os.path.join(LOCAL_PACKAGE_PATH, package_name)
            aws.download_file_from_s3(python_package["src_key"],
                                      requirements_path, bucket)
            cmd_partial[package_name] = "-r " + requirements_path
        else:
            aws.download_and_extract_zip(python_package["src_key"],
                                         LOCAL_PACKAGE_PATH, bucket)
            cmd_partial[package_name] = os.path.join(LOCAL_PACKAGE_PATH,
                                                     package_name)

    logger.info("Setting up packages")

    # Versions cortex pins; building a conflicting version must fail early
    # (previously this variant skipped validation and only failed at runtime).
    restricted_packages = get_restricted_packages()

    for package_name in build_order:
        package_wheel_path = os.path.join(WHEELHOUSE_PATH, package_name)
        requirement = cmd_partial[package_name]
        logger.info("Building package {}".format(package_name))
        completed_process = run("pip3 wheel -w {} {}".format(
            package_wheel_path, requirement).split())
        if completed_process.returncode != 0:
            raise UserException("creating wheels", package_name)

        # Every built wheel (including transitive dependencies) must not
        # conflict with a pinned version. Wheel filenames look like
        # {dist}-{version}-..., so the first two "-" fields identify it.
        for wheelname in os.listdir(package_wheel_path):
            name_split = wheelname.split("-")
            dist_name, version = name_split[0], name_split[1]
            expected_version = restricted_packages.get(dist_name, None)
            if expected_version is not None and version != expected_version:
                raise UserException(
                    "when installing {}, found {}=={} but cortex requires {}=={}"
                    .format(package_name, dist_name, version, dist_name,
                            expected_version))

    logger.info("Validating packages")

    # Installing from the local wheelhouse (offline) verifies the wheels work.
    for package_name in build_order:
        requirement = cmd_partial[package_name]
        logger.info("Installing package {}".format(package_name))
        completed_process = run(
            "pip3 install --no-index --find-links={} {}".format(
                os.path.join(WHEELHOUSE_PATH, package_name),
                requirement).split())
        if completed_process.returncode != 0:
            raise UserException("installing package", package_name)

    logger.info("Caching built packages")

    # Upload each wheelhouse so later workloads can install without rebuilding.
    for package_name in build_order:
        aws.compress_zip_and_upload(
            os.path.join(WHEELHOUSE_PATH, package_name),
            python_packages[package_name]["package_key"],
            bucket,
        )