def remote_centroid(cluster, test_dataset, batch_centroid_args, args,
                    api, resume, prediction_file=None, session_file=None,
                    path=None, log=None):
    """Computes a centroid for each entry in the `test_set`.

    Predictions are computed remotely using the batch centroid call.
    Unless `args.no_csv` is set, the resulting predictions CSV is
    downloaded to `prediction_file`; if `args.to_dataset` is set, the
    remote output dataset id is logged as a created resource.

    :param cluster: cluster resource (id, dict or object) used to predict
    :param test_dataset: dataset resource the centroids are computed for
    :param batch_centroid_args: dict of creation arguments for the batch
        centroid
    :param args: command-line options object (uses .debug, .verbosity,
        .no_csv, .to_dataset)
    :param api: BigML API connection object
    :param resume: whether to try to resume from a previous run's logs
    :param prediction_file: local path to store the downloaded CSV
    :param session_file: path of the session log file
    :param path: output directory for checkpoint/log files
    :param log: log file handler for created resources
    """
    cluster_id = bigml.api.get_cluster_id(cluster)
    # if resuming, try to extract dataset from log files
    if resume:
        message = u.dated("Batch centroid not found. Resuming.\n")
        resume, batch_centroid = c.checkpoint(
            c.is_batch_centroid_created, path, debug=args.debug,
            message=message, log_file=session_file, console=args.verbosity)
    if not resume:
        batch_centroid = create_batch_centroid(
            cluster_id, test_dataset, batch_centroid_args,
            args, api, session_file=session_file, path=path, log=log)
    if not args.no_csv:
        # a None return value means the CSV download failed; abort with a
        # message rather than continue without the predictions file
        file_name = api.download_batch_centroid(batch_centroid,
                                                prediction_file)
        if file_name is None:
            sys.exit("Failed downloading CSV.")
    if args.to_dataset:
        # wait for the batch centroid to finish so the output dataset id
        # is available in the resource body
        batch_centroid = bigml.api.check_resource(batch_centroid, api=api)
        new_dataset = bigml.api.get_dataset_id(
            batch_centroid['object']['output_dataset_resource'])
        if new_dataset is not None:
            message = u.dated("Batch centroid dataset created: %s\n"
                              % u.get_url(new_dataset))
            u.log_message(message, log_file=session_file,
                          console=args.verbosity)
            u.log_created_resources("batch_centroid_dataset", path,
                                    new_dataset, mode='a')
def remote_centroid(cluster, test_dataset, batch_centroid_args, args,
                    api, resume, prediction_file=None, session_file=None,
                    path=None, log=None):
    """Computes a centroid for each entry in the `test_set`.

    Predictions are computed remotely using the batch centroid call.
    Unless `args.no_csv` is set, the resulting predictions CSV is
    downloaded to `prediction_file`; if `args.to_dataset` is set, the
    remote output dataset id is logged as a created resource.

    :param cluster: cluster resource (id, dict or object) used to predict
    :param test_dataset: dataset resource the centroids are computed for
    :param batch_centroid_args: dict of creation arguments for the batch
        centroid
    :param args: command-line options object (uses .debug, .verbosity,
        .no_csv, .to_dataset)
    :param api: BigML API connection object
    :param resume: whether to try to resume from a previous run's logs
    :param prediction_file: local path to store the downloaded CSV
    :param session_file: path of the session log file
    :param path: output directory for checkpoint/log files
    :param log: log file handler for created resources
    """
    cluster_id = bigml.api.get_cluster_id(cluster)
    # if resuming, try to extract dataset from log files
    if resume:
        message = u.dated("Batch centroid not found. Resuming.\n")
        resume, batch_centroid = c.checkpoint(
            c.is_batch_centroid_created, path, debug=args.debug,
            message=message, log_file=session_file, console=args.verbosity)
    if not resume:
        batch_centroid = create_batch_centroid(
            cluster_id, test_dataset, batch_centroid_args,
            args, api, session_file=session_file, path=path, log=log)
    if not args.no_csv:
        # FIX: the download result was previously ignored; a None return
        # value means the CSV download failed, so abort with a message
        # instead of silently continuing without the predictions file
        file_name = api.download_batch_centroid(batch_centroid,
                                                prediction_file)
        if file_name is None:
            sys.exit("Failed downloading CSV.")
    if args.to_dataset:
        # wait for the batch centroid to finish so the output dataset id
        # is available in the resource body
        batch_centroid = bigml.api.check_resource(batch_centroid, api=api)
        new_dataset = bigml.api.get_dataset_id(
            batch_centroid['object']['output_dataset_resource'])
        if new_dataset is not None:
            message = u.dated("Batch centroid dataset created: %s\n"
                              % u.get_url(new_dataset))
            u.log_message(message, log_file=session_file,
                          console=args.verbosity)
            u.log_created_resources("batch_centroid_dataset", path,
                                    new_dataset, mode='a')
def remote_centroid(cluster, test_dataset, batch_centroid_args, args,
                    api, resume, prediction_file=None, session_file=None,
                    path=None, log=None):
    """Computes a centroid for each entry in the `test_set`.

    Predictions are computed remotely using the batch centroid call,
    and the resulting CSV is downloaded to `prediction_file`.

    :param cluster: cluster resource (id, dict or object) used to predict
    :param test_dataset: dataset resource the centroids are computed for
    :param batch_centroid_args: dict of creation arguments for the batch
        centroid
    :param args: command-line options object (uses .debug, .verbosity)
    :param api: BigML API connection object
    :param resume: whether to try to resume from a previous run's logs
    :param prediction_file: local path to store the downloaded CSV
    :param session_file: path of the session log file
    :param path: output directory for checkpoint/log files
    :param log: log file handler for created resources
    """
    cluster_id = bigml.api.get_cluster_id(cluster)
    # if resuming, try to extract dataset from log files
    if resume:
        message = u.dated("Batch centroid not found. Resuming.\n")
        resume, batch_centroid = c.checkpoint(
            c.is_batch_centroid_created, path, debug=args.debug,
            message=message, log_file=session_file, console=args.verbosity)
    if not resume:
        batch_centroid = create_batch_centroid(
            cluster_id, test_dataset, batch_centroid_args,
            args, api, session_file=session_file, path=path, log=log)
    # NOTE(review): unlike the sibling variants of this function, the CSV
    # is downloaded unconditionally here (no `args.no_csv` guard) and the
    # return value is not checked — confirm whether this is intentional
    api.download_batch_centroid(batch_centroid, prediction_file)