def action_serve(argc, argv):
    """Serve the project's trained Keras model as an HTTP API.

    Loads the project from the CLI path, resolves the output class labels
    (either user supplied via --classes or derived from the model/project),
    optionally wraps the WSGI app in a profiler, then starts the server.

    Side effects: assigns the module globals prj, classes and num_outputs
    that the request handlers read.
    """
    global prj, app, classes, num_outputs

    args = parse_args(argv)
    prj = Project(args.path)
    err = prj.load()
    if err is not None:
        log.error("error while loading project: %s", err)
        quit()
    elif not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    if args.classes is None:
        # derive the number of outputs from the model's output tensor shape
        num_outputs = prj.model.output.shape[1]
        if prj.classes is None:
            # no label metadata available, synthesize generic names
            classes = ["class_%d" % i for i in range(num_outputs)]
        else:
            classes = [prj.classes[i] for i in range(num_outputs)]
    else:
        # user supplied a comma separated list of labels; strip once per item
        # (the original stripped each item twice) and drop empty entries
        classes = [c for c in (s.strip() for s in args.classes.split(',')) if c != ""]
        num_outputs = len(classes)

    if args.profile:
        # FIX: ProfilerMiddleware moved out of werkzeug.contrib in Werkzeug 1.0;
        # prefer the modern location and fall back for legacy installs.
        try:
            from werkzeug.middleware.profiler import ProfilerMiddleware
        except ImportError:
            from werkzeug.contrib.profiler import ProfilerMiddleware
        # profiling implies debug mode so the reloader/profiler output is visible
        args.debug = True
        app.config['PROFILE'] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[args.restrictions])

    app.run(host=args.address, port=args.port, debug=args.debug)
def action_to_fdeep(argc, argv):
    """Convert the project's trained weights to the frugally-deep format.

    Parses the CLI arguments, loads the project at the given path and, if a
    trained model is present, runs the fdeep converter on its weights file.
    """
    args = parse_args(argv)
    project = Project(args.path)
    load_error = project.load()

    # bail out early on any loading or training problem
    if load_error is not None:
        log.error("error while loading project: %s", load_error)
        quit()
    if not project.is_trained():
        log.error("no trained model found for this project")
        quit()

    convert(project.weights_path, project.fdeep_path, args.no_tests, args.metadata)
def action_serve(argc, argv):
    """Start the HTTP server exposing the project's trained model.

    Loads the project from the CLI path, verifies a trained Keras model
    exists and runs the Flask app on the configured host and port.

    Side effects: assigns the module global prj read by the request handlers.
    """
    global prj, app

    args = parse_args(argv)
    prj = Project(args.path)
    load_err = prj.load()

    # guard clauses: refuse to serve a broken or untrained project
    if load_err is not None:
        log.error("error while loading project: %s", load_err)
        quit()
    if not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    app.run(host=args.host, port=args.port, debug=args.debug)
def action_to_fdeep(args):
    """Export the project's trained Keras model to the frugally-deep format.

    Expects `args` to carry a `project_path` attribute; logs and exits if
    the project cannot be loaded or has no trained model.
    """
    prj = Project(args.project_path)
    error = prj.load()

    # abort on any load/training problem before attempting conversion
    if error is not None:
        log.error("error while loading project: %s", error)
        quit()
    if not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    log.info("converting %s to %s ...", prj.weights_path, prj.fdeep_path)
    convert(prj.model, prj.fdeep_path)
def action_to_tf(argc, argv):
    """Freeze the project's trained Keras model and export it as a
    TensorFlow protobuf (`model.pb`) inside the project directory.
    """
    args = parse_args(argv)
    prj = Project(args.path)
    err = prj.load()
    if err is not None:
        log.error("error while loading project: %s", err)
        quit()
    elif not prj.is_trained():
        # FIX: corrected typo "projec" -> "project" so the message matches
        # the other actions in this file
        log.error("no trained Keras model found for this project")
        quit()

    # freeze the live Keras session graph, keeping only the model output ops
    frozen_graph = freeze_session(
        K.get_session(),
        output_names=[out.op.name for out in prj.model.outputs])

    log.info("saving protobuf to %s ...", os.path.join(prj.path, 'model.pb'))
    tf.train.write_graph(frozen_graph, prj.path, "model.pb", as_text=False)
def action_relevance(argc, argv):
    """Estimate per-attribute relevance by zeroing one feature at a time.

    A reference accuracy is computed on the untouched dataset; then, for
    each column, the feature is zeroed out, accuracy is re-evaluated and
    the drop in weighted precision is recorded as that column's relevance.
    Results are printed as an ASCII table (negative relevances in red) and
    optionally dumped to a JSON file.
    """
    args = parse_args(argv)
    prj = Project(args.path)
    err = prj.load()
    if err is not None:
        log.error("error while loading project: %s", err)
        quit()
    elif not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    prj.prepare(args.dataset, 0.0, 0.0)

    X, y = prj.dataset.subsample(args.ratio)
    # flat datasets expose (rows, cols) directly; otherwise infer from the
    # first sequence element and the number of sequences
    nrows, ncols = X.shape if prj.dataset.is_flat else (X[0].shape[0], len(X))
    attributes = get_attributes(args.attributes, ncols)

    log.info("computing relevance of %d attributes on %d samples ...", ncols, nrows)

    # reference run on the untouched dataset
    # FIX: the confusion matrices were unpacked but never used; the reference
    # weighted precision is now hoisted out of the loop instead of being
    # re-looked-up on every iteration
    start = time.time()
    ref_accu, _ = prj.accuracy_for(X, y, repo_as_dict=True)
    ref_precision = ref_accu['weighted avg']['precision']

    deltas = []
    tot = 0
    speed = (1.0 / (time.time() - start)) * nrows
    for col in range(0, ncols):
        log.info(
            "[%.2f evals/s] computing relevance for attribute [%d/%d] %s ...",
            speed, col + 1, ncols, attributes[col])

        # temporarily zero the feature, evaluate, then restore it
        backup = zeroize_feature(X, col, prj.dataset.is_flat)

        start = time.time()
        accu, _ = prj.accuracy_for(X, y, repo_as_dict=True)
        speed = (1.0 / (time.time() - start)) * nrows

        delta = ref_precision - accu['weighted avg']['precision']
        tot += delta
        deltas.append((col, delta))

        restore_feature(X, col, backup, prj.dataset.is_flat)

    # biggest absolute impact first
    deltas = sorted(deltas, key=lambda x: abs(x[1]), reverse=True)

    rels = []
    num_zero = 0
    table = [("Column", "Feature", "Relevance")]
    for col, d in deltas:
        colname = attributes[col]
        rel = {"attribute": colname, "index": col, "relevance": 0.0}
        if d != 0.0:
            # relevance is the delta as a percentage of the total delta
            relevance = (d / tot) * 100.0
            row = ("%d" % col, colname, "%.2f%%" % relevance)
            # negative relevance (accuracy improved without the feature) in red
            row = ["\033[31m%s\033[0m" % e for e in row] if relevance < 0.0 else row
            table.append(row)
            rel['relevance'] = relevance
        else:
            num_zero += 1
        rels.append(rel)

    print("")
    print(AsciiTable(table).table)
    print("")

    if num_zero > 0:
        log.info("%d features have 0 relevance.", num_zero)

    if args.to_json is not None:
        print("")
        log.info("creating %s ...", args.to_json)
        with open(args.to_json, 'w+') as fp:
            json.dump(rels, fp, default=default)
def action_relevance(argc, argv):
    """Estimate per-attribute relevance, optionally in parallel.

    Each task zeroes out one feature column, re-runs inference and records
    the drop of the chosen metric versus a reference run. Tasks are
    dispatched through a blocking TaskQueue; with --workers 0/1 this
    degrades to serial execution.

    NOTE(review): `deltas` and `tot` are module globals that appear to be
    populated by the `run_inference_without` worker tasks — they are never
    (re)initialized here, so repeated calls may accumulate stale results;
    confirm against where these globals are defined.
    """
    global prj, deltas, tot, start, speed, nrows, ncols, attributes
    args = parse_args(argv)
    prj = Project(args.path)
    err = prj.load()
    if err is not None:
        log.error("error while loading project: %s", err)
        quit()
    elif not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    prj.prepare(args.dataset, 0.0, 0.0)

    # one single worker in blocking mode = serial
    if args.workers == 0:
        args.workers = 1

    X, y = prj.dataset.subsample(args.ratio)
    # flat datasets expose (rows, cols) via .shape; otherwise infer from the
    # first sequence element and the number of sequences
    nrows, ncols = X.shape if prj.dataset.is_flat else (X[0].shape[0], len(X))
    attributes = get_attributes(args.attributes, ncols)
    queue = TaskQueue('relevance', num_workers=args.workers, blocking=True)

    if args.workers == 1:
        log.info("computing relevance of %d attributes on %d samples using '%s' metric (slow mode) ...", ncols, nrows, args.metric)
    else:
        log.info("computing relevance of %d attributes on %d samples using '%s' metric (parallel with %d workers) ...", ncols, nrows, args.metric, queue.num_workers)

    # reference run on the untouched dataset; timing it gives the initial
    # evals/s estimate (presumably displayed by the worker tasks)
    start = time.time()
    ref_accu, ref_cm = prj.accuracy_for(X, y, repo_as_dict = True)
    speed = (1.0 / (time.time() - start)) * nrows

    # one task per column: each zeroes the column and measures the metric drop
    # against the reference value
    for col in range(0, ncols):
        queue.add_task( run_inference_without,
                X, y, col, prj.dataset.is_flat, ref_accu['weighted avg'][args.metric],
                args.metric)

    # wait for all inferences to finish
    queue.join()

    # sort relevances by absolute value
    deltas = sorted(deltas, key = lambda x: abs(x[1]), reverse = True)

    rels = []
    num_zero = 0
    table = [("Column", "Feature", "Relevance")]
    for delta in deltas:
        col, d = delta
        colname = attributes[col]
        rel = { "attribute": colname, "index": col, "relevance": 0.0 }
        if d != 0.0:
            # relevance is this column's share of the total metric delta
            relevance = (d / tot) * 100.0
            row = ("%d" % col, attributes[col], "%.2f%%" % relevance)
            # negative relevance (metric improved without the feature) in red
            row = ["\033[31m%s\033[0m" % e for e in row] if relevance < 0.0 else row
            table.append(row)
            rel['relevance'] = relevance
        else:
            num_zero += 1
        rels.append(rel)

    print("")
    print(AsciiTable(table).table)
    print("")

    if num_zero > 0:
        log.info("%d features have 0 relevance.", num_zero)

    if args.to_json is not None:
        print("")
        log.info("creating %s ...", args.to_json)
        with open(args.to_json, 'w+') as fp:
            json.dump(rels, fp, default=default)