def _do_reuters_upload(host=None, s3_bucket=None, knative=False):
    """Upload the Reuters dataset state (input matrix, category outputs,
    and feature counts) to the chosen upload target.

    Exactly one destination mechanism must be indicated: an explicit
    ``host``, an ``s3_bucket``, or ``knative`` resolution of the host.

    :param host: explicit upload host, or None to resolve one
    :param s3_bucket: S3 bucket name to upload into, or None
    :param knative: if truthy, resolve the upload host via Kubernetes/knative
    :raises ValueError: if no destination is given
    """
    # NOTE: use an explicit raise rather than `assert`, which is stripped
    # when Python runs with -O and would silently skip this validation.
    if not (host or s3_bucket or knative):
        raise ValueError("Must give a host, S3 bucket or knative")

    user = "******"
    host = get_kubernetes_upload_host(knative, host)

    # Upload the matrix data
    upload_sparse_matrix(user, "inputs", _REUTERS_DATA_DIR,
                         host=host, s3_bucket=s3_bucket)

    # Upload the categories data
    cat_path = join(_REUTERS_DATA_DIR, "outputs")
    upload_binary_state(user, "outputs", cat_path,
                        host=host, s3_bucket=s3_bucket)

    # Upload the feature counts
    counts_path = join(_REUTERS_DATA_DIR, "feature_counts")
    upload_binary_state(user, "feature_counts", counts_path,
                        host=host, s3_bucket=s3_bucket)
def matrix_state_upload(ctx, mat_size, n_splits, host=None, knative=True):
    """Upload all matrix-multiplication state to the upload host.

    Pushes the parameter config plus submatrices A, B and the result
    matrix, each under its well-known state key, for the given matrix
    size and split count.

    :param ctx: task context (unused here, required by the task interface)
    :param mat_size: size of the square matrices
    :param n_splits: number of submatrix splits
    :param host: explicit upload host, or None to resolve one
    :param knative: if truthy, resolve the upload host via Kubernetes/knative
    """
    user = "******"
    target_host = get_kubernetes_upload_host(knative, host)

    # Pair each state key with the local file that holds its data,
    # then push them all in order.
    state_files = [
        (MATRIX_CONF_STATE_KEY, get_params_file(mat_size, n_splits)),
        (SUBMATRICES_KEY_A, get_mat_a_file(mat_size, n_splits)),
        (SUBMATRICES_KEY_B, get_mat_b_file(mat_size, n_splits)),
        (RESULT_MATRIX_KEY, get_result_file(mat_size, n_splits)),
    ]
    for state_key, file_path in state_files:
        upload_binary_state(user, state_key, file_path, host=target_host)
def _do_upload(data_dir, file_name, user, host, key=None):
    """Upload one state file from a local data directory.

    :param data_dir: directory containing the file
    :param file_name: name of the file within ``data_dir``
    :param user: user the state is uploaded under
    :param host: upload host
    :param key: state key to upload under; falls back to ``file_name``
        when not given (or falsy)
    """
    print(f"Uploading state {file_name}")
    state_key = key or file_name
    upload_binary_state(user, state_key, join(data_dir, file_name), host=host)