def remote_anomaly_score(anomaly, test_dataset, batch_anomaly_score_args,
                         args, api, resume, prediction_file=None,
                         session_file=None, path=None, log=None):
    """Score every entry of ``test_dataset`` remotely.

    Scores are produced server-side through a batch anomaly score call.
    Unless ``args.no_csv`` is set, the resulting CSV is downloaded to
    ``prediction_file``; when ``args.to_dataset`` is set, the generated
    output dataset is logged as a created resource.
    """
    anomaly_id = bigml.api.get_anomaly_id(anomaly)

    # When resuming, try to recover a previously created batch anomaly
    # score from the checkpoint log files.
    if resume:
        msg = u.dated("Batch anomaly score not found. Resuming.\n")
        resume, batch_anomaly_score = c.checkpoint(
            c.is_batch_anomaly_score_created, path, debug=args.debug,
            message=msg, log_file=session_file, console=args.verbosity)

    # No checkpoint hit: create the batch anomaly score from scratch.
    if not resume:
        batch_anomaly_score = create_batch_anomaly_score(
            anomaly_id, test_dataset, batch_anomaly_score_args,
            args, api, session_file=session_file, path=path, log=log)

    if not args.no_csv:
        api.download_batch_anomaly_score(batch_anomaly_score,
                                         prediction_file)

    if args.to_dataset:
        # Wait until the resource is finished so the output dataset id
        # is available in its 'object' payload.
        batch_anomaly_score = bigml.api.check_resource(batch_anomaly_score,
                                                       api=api)
        output_dataset = bigml.api.get_dataset_id(
            batch_anomaly_score['object']['output_dataset_resource'])
        if output_dataset is not None:
            msg = u.dated("Batch anomaly score dataset created: %s\n"
                          % u.get_url(output_dataset))
            u.log_message(msg, log_file=session_file,
                          console=args.verbosity)
            u.log_created_resources("batch_anomaly_score_dataset", path,
                                    output_dataset, open_mode='a')
def remote_anomaly_score(anomaly, test_dataset, batch_anomaly_score_args,
                         args, api, resume, prediction_file=None,
                         session_file=None, path=None, log=None):
    """Compute an anomaly score for each row of ``test_dataset``.

    The scoring runs remotely via a batch anomaly score call.  The CSV
    result is downloaded unless ``args.no_csv`` is set, and the output
    dataset is logged when ``args.to_dataset`` is set.
    """
    anomaly_id = bigml.api.get_anomaly_id(anomaly)

    # On resume, look for an already-created batch anomaly score in the
    # checkpoint log files before creating a new one.
    if resume:
        resume_msg = u.dated("Batch anomaly score not found. Resuming.\n")
        resume, batch_anomaly_score = c.checkpoint(
            c.is_batch_anomaly_score_created, path, debug=args.debug,
            message=resume_msg, log_file=session_file,
            console=args.verbosity)

    if not resume:
        batch_anomaly_score = create_batch_anomaly_score(
            anomaly_id, test_dataset, batch_anomaly_score_args,
            args, api, session_file=session_file, path=path, log=log)

    if not args.no_csv:
        api.download_batch_anomaly_score(batch_anomaly_score,
                                         prediction_file)

    if args.to_dataset:
        # Block until the resource finishes so its output dataset id is
        # present in the 'object' payload.
        batch_anomaly_score = bigml.api.check_resource(batch_anomaly_score,
                                                       api=api)
        created_dataset = bigml.api.get_dataset_id(
            batch_anomaly_score['object']['output_dataset_resource'])
        if created_dataset is not None:
            created_msg = u.dated("Batch anomaly score dataset created: %s\n"
                                  % u.get_url(created_dataset))
            u.log_message(created_msg, log_file=session_file,
                          console=args.verbosity)
            # NOTE(review): a sibling variant of this function passes
            # open_mode='a' here instead of mode='a' — verify which
            # keyword u.log_created_resources actually accepts.
            u.log_created_resources("batch_anomaly_score_dataset", path,
                                    created_dataset, mode='a')
def remote_anomaly_score(anomaly, test_dataset, batch_anomaly_score_args,
                         args, api, resume, prediction_file=None,
                         session_file=None, path=None, log=None):
    """Compute an anomaly score for each entry of ``test_dataset``.

    Scores are computed remotely with a batch anomaly score call and the
    resulting CSV is downloaded to ``prediction_file``.
    """
    anomaly_id = bigml.api.get_anomaly_id(anomaly)

    # When resuming, attempt to recover the batch anomaly score from the
    # checkpoint log files instead of creating it again.
    if resume:
        msg = u.dated("Batch anomaly score not found. Resuming.\n")
        resume, batch_anomaly_score = c.checkpoint(
            c.is_batch_anomaly_score_created, path, debug=args.debug,
            message=msg, log_file=session_file, console=args.verbosity)

    if not resume:
        batch_anomaly_score = create_batch_anomaly_score(
            anomaly_id, test_dataset, batch_anomaly_score_args,
            args, api, session_file=session_file, path=path, log=log)

    # NOTE(review): unlike sibling variants, this version downloads the
    # CSV unconditionally (no args.no_csv guard) — confirm that is
    # intentional for this revision before unifying.
    api.download_batch_anomaly_score(batch_anomaly_score, prediction_file)