def anomalies_processing(datasets, anomalies, anomaly_ids,
                         api, args, resume, fields=None,
                         session_file=None, path=None, log=None):
    """Creates or retrieves anomaly detectors from the command data

    """
    # If we have a dataset but no anomaly detector, we create one unless the
    # no_anomaly flag has been set.
    if datasets and not (has_anomalies(args) or args.no_anomaly):
        anomaly_ids = []
        anomalies = []

        # Only 1 anomaly detector per bigmler command at present
        number_of_anomalies = 1
        if resume:
            resume, anomaly_ids = c.checkpoint(
                c.are_anomalies_created, path, number_of_anomalies,
                debug=args.debug)
            if not resume:
                message = u.dated("Found %s anomaly detectors out of %s."
                                  " Resuming.\n"
                                  % (len(anomaly_ids), number_of_anomalies))
                u.log_message(message, log_file=session_file,
                              console=args.verbosity)

        anomalies = anomaly_ids
        number_of_anomalies -= len(anomaly_ids)

        anomaly_args = r.set_anomaly_args(args, fields=fields,
                                          anomaly_fields=args.anomaly_fields_)
        anomalies, anomaly_ids = r.create_anomalies(datasets, anomalies,
                                                    anomaly_args, args, api,
                                                    path, session_file, log)
    # If an anomaly detector is provided, we use it.
    elif args.anomaly:
        anomaly_ids = [args.anomaly]
        anomalies = anomaly_ids[:]
    elif args.anomalies or args.anomaly_tag:
        anomalies = anomaly_ids[:]

    # If we are going to predict, we must retrieve the anomaly detectors.
    if anomaly_ids and args.test_set:
        anomalies, anomaly_ids = r.get_anomalies(anomalies, args, api,
                                                 session_file)

    return anomalies, anomaly_ids, resume
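# A minimal usage sketch (not part of the original module): this is roughly how
# a bigmler dispatcher could call anomalies_processing after the dataset step.
# The module-level aliases u, r, and c (utility, resource, and checkpoint
# helpers) are assumed to be imported near the top of the file, and the
# variables below (datasets, fields, args, api, resume, path, session_file,
# log) are assumed to come from earlier stages of the command flow; the names
# are illustrative, not the definitive caller code.
#
#     anomalies, anomaly_ids, resume = anomalies_processing(
#         datasets, anomalies, anomaly_ids, api, args, resume,
#         fields=fields, session_file=session_file, path=path, log=log)
#
# The returned anomaly detectors can then be used downstream, e.g. to score
# the rows in args.test_set.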