Code example #1
def upload_table():
    data = request.form

    notebook = get_notebook_data(data['notebook_name'])

    # Check if the file has been uploaded successfully
    if 'file' not in request.files:
        return json_encoder.encode({
            "message": "Failure",
            "comment": "No file received"
        })

    file = request.files['file']

    # Check if there is a file
    if file.filename == '':
        return json_encoder.encode({
            "message": "Failure",
            "comment": "No file selected"
        })

    if request.form['load_notebook_status'] == 'false':

        # Check for allowed file types
        if file and allowed_file(file.filename):

            filename = secure_filename(file.filename)
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(filepath)

            # the first row gives the number of columns
            with open(filepath, "r") as uploaded_file:
                reader = csv.reader(uploaded_file)
                total_cols = len(next(reader))

            # every column except the last is a feature; the last is the label
            features_cols = list(range(total_cols - 1))
            label_cols = [total_cols - 1]

            # extract contents of the file to give X and Y data
            csvObject = CSV(filepath, {
                'features': features_cols,
                'labels': label_cols
            }, False)
            X, Y = csvObject.extract()

            # store the extracted data into the notebook; this stays inside
            # the allowed-file branch so X and Y are always defined here
            notebook['x_raw'] = X
            notebook['y_raw'] = Y
            notebook['file_name'] = data['file_name']

            set_notebook_data(data['notebook_name'])

    return json_encoder.encode({
        "message": "Success",
        "comment": "Table loaded successfully"
    })
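The handler above relies on an allowed_file() helper that is not shown in these excerpts. A minimal sketch, assuming the standard Flask upload whitelist pattern (the ALLOWED_EXTENSIONS set and its contents are assumptions):

ALLOWED_EXTENSIONS = {'csv', 'txt'}

def allowed_file(filename):
    # accept only filenames with a whitelisted extension
    return ('.' in filename
            and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS)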
Code example #2
def check_username_and_password_matches():
    try:

        # if new installation, create a new user table
        if not (user_table_exists()):
            create_new_user_table()

        # extract from POST request form data wrapped in json
        username = request.json['username']
        password = request.json['password']

        # open the user table and check whether the username and password match
        with open("USERTABLE", "rb") as fileObject:
            table = pickle.load(fileObject)

        if any(username == obj['username'] and password == obj['password']
               for obj in table):
            return json_encoder.encode({
                "message": "Success",
                "comment": "Username and password match"
            })

        return json_encoder.encode({
            "message": "Success",
            "comment": "Username and password do not match"
        })
    except Exception:
        return json_encoder.encode({
            "message": "Failure",
            "comment": "Other error"
        })
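user_table_exists() and create_new_user_table() are called by several handlers but never defined in these excerpts. A minimal sketch consistent with how the handlers read USERTABLE (a pickled list of user dicts):

import os
import pickle

def user_table_exists():
    # the user table is a pickled list stored beside the application
    return os.path.exists("USERTABLE")

def create_new_user_table():
    # a fresh installation starts with an empty list of users
    with open("USERTABLE", "wb") as fileObject:
        pickle.dump([], fileObject)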
Code example #3
def check_username_exists(check_username_exists_json):
    try:
        check_username_exists_dict = json_decoder.decode(
            check_username_exists_json)

        # if new installation, create new user table

        if not (user_table_exists()):
            create_new_user_table()

        # check the user table to see whether the username exists

        with open("USERTABLE", "rb") as fileObject:
            table = pickle.load(fileObject)

        if any(obj['username'] == check_username_exists_dict['username']
               for obj in table):
            return json_encoder.encode({
                "message": "Success",
                "comment": "Username exists"
            })

        return json_encoder.encode({
            "message": "Success",
            "comment": "Username available"
        })

    except Exception:
        return json_encoder.encode({
            "message": "Failure",
            "comment": "Other error"
        })
Code example #4
def check_notebook_name_exists(check_notebook_name_exists_json):
    check_notebook_name_exists_dict = json_decoder.decode(
        check_notebook_name_exists_json)

    # if new installation, create global notebooks table

    if not (notebook_global_table_exist()):
        create_notebook_global_table()

    # open the global notebooks table and check whether the notebook exists

    with open("NOTEBOOKS_DATA", "rb") as fileObject:
        table = pickle.load(fileObject)

    if any(obj['notebook_name'] ==
           check_notebook_name_exists_dict['notebook_name'] for obj in table):
        return json_encoder.encode({
            "message": "Success",
            "comment": "Notebook name exists"
        })

    return json_encoder.encode({
        "message": "Success",
        "comment": "Notebook name available"
    })
Code example #5
def add_user():
    try:
        user = request.json
        print(user)

        # if new installation, create a new user table
        if not (user_table_exists()):
            create_new_user_table()

        fileObject = open("USERTABLE", "rb")
        table = pickle.load(fileObject)
        fileObject.close()

        # newly created user does not have any notebooks
        obj = {
            "username": user['username'],
            "password": user['password'],
            "created_notebooks": []
        }

        table.append(obj)

        # save updated user table
        fileObject = open("USERTABLE", "wb")
        pickle.dump(table, fileObject)
        fileObject.close()

        return json_encoder.encode({"message": "success"})
    except Exception:
        return json_encoder.encode({"message": "failure"})
Code example #6
def check_username_exists(check_username_exists_json):
    check_username_exists_dict = json_decoder.decode(
        check_username_exists_json)

    print("\n", check_username_exists_dict, "\n")
    # if new installation, create new user table
    if not (user_table_exists()):
        create_new_user_table()

    # check the user table to see whether the username exists
    with open("USERTABLE", "rb") as fileObject:
        table = pickle.load(fileObject)
    print("\n", table, "\n")
    if any(obj['username'] == check_username_exists_dict['username']
           for obj in table):
        print("exists")
        return json_encoder.encode({
            "message": "Success",
            "comment": "Username exists"
        })

    return json_encoder.encode({
        "message": "Success",
        "comment": "Username available"
    })
Code example #7
def add_notebook():
    print("changed")
    try:
        # notebook is a weakdict so that weak references to it can be created
        notebook = weakdict(request.json)

        # create a weak reference proxy of the notebook's data
        weakdict_notebook = weakref.proxy(notebook)

        fileObject = open("NOTEBOOK_" + notebook['notebook_name'], "wb")
        pickle.dump(notebook, fileObject)
        fileObject.close()

        # if new installation, create a global notebooks table

        if not (notebook_global_table_exist()):
            create_notebook_global_table()

        # open global notebooks table and add notebook configuration

        fileObject = open("NOTEBOOKS_DATA", "rb")
        table = pickle.load(fileObject)
        fileObject.close()

        table.append({
            "notebook_name": notebook['notebook_name'],
            "GPU_count": int(notebook['GPU_count']),
            "CPU_count": int(notebook['CPU_count']),
            "is_online": False
        })

        fileObject = open("NOTEBOOKS_DATA", "wb")
        pickle.dump(table, fileObject)
        fileObject.close()

        # open global user table and add this notebook to user's list of created notebooks

        table = pickle.load(open("USERTABLE", "rb"))
        for obj in table:
            if obj['username'] == notebook['username']:
                obj['created_notebooks'].append(notebook['notebook_name'])

        pickle.dump(table, open("USERTABLE", "wb"))

        return json_encoder.encode({
            "message": "Success",
            "comment": "Notebook created"
        })
    except Exception:
        return json_encoder.encode({
            "message": "Failure",
            "comment": "Other error"
        })
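weakdict is not defined in these excerpts. Built-in dict instances cannot be the target of weak references, so a trivial subclass is the usual workaround; a plausible sketch:

class weakdict(dict):
    # plain dicts do not support weak references; a dict subclass does,
    # which is what allows the weakref.proxy(notebook) call above
    pass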
Code example #8
def upload_predefined():

    # Use a pre existing dataset
    # Boston housing
    # CIFAR10
    # CIFAR100
    # Iris
    # Oxford17 flowers
    # MNIST

    data = request.json
    notebook = get_notebook_data(data['notebook_name'])

    # load the dataset from the datasets folder into the notebook

    _x = numpy.load("datasets/" + data['dataset_name'] + "/X")

    # flatten each sample into a 1-D feature vector
    notebook['x_raw'] = numpy.reshape(_x,
                                      newshape=(-1, numpy.prod(_x.shape[1:])))
    notebook['y_raw'] = numpy.load("datasets/" + data['dataset_name'] + "/Y")

    set_notebook_data(data['notebook_name'])

    return json_encoder.encode({
        "message": "Success",
        "comment": "Data loaded successfully"
    })
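get_notebook_data() and set_notebook_data() appear throughout but are not shown. Since set_notebook_data() receives only the notebook name, a module-level cache of open notebooks is implied; a minimal sketch (the _open_notebooks cache is a hypothetical name):

import pickle

_open_notebooks = {}

def get_notebook_data(notebook_name):
    # load the pickled notebook once and cache it, so that in-place
    # mutations made by the handlers survive until the next save
    if notebook_name not in _open_notebooks:
        with open("NOTEBOOK_" + notebook_name, "rb") as fileObject:
            _open_notebooks[notebook_name] = pickle.load(fileObject)
    return _open_notebooks[notebook_name]

def set_notebook_data(notebook_name):
    # write the cached (possibly mutated) notebook back to disk
    with open("NOTEBOOK_" + notebook_name, "wb") as fileObject:
        pickle.dump(_open_notebooks[notebook_name], fileObject)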
Code example #9
def compile_sequential_model():
    # Compile and train the neural network

    data = request.json

    notebook = get_notebook_data(data['notebook_name'])
    notebook['hyperparameters'] = data['hyperparameters']
    notebook["history"] = {
        "acc": [],
        "val_acc": [],
        "loss": [],
        "val_loss": []
    }

    # allocate the devices that were specified when the notebook was created

    config = tensorflow.ConfigProto()
    config.gpu_options.allow_growth = True
    available_gpus = GPUtil.getAvailable()
    if available_gpus:
        # guard against a division by zero on machines without GPUs
        config.gpu_options.per_process_gpu_memory_fraction = (
            notebook["GPU_count"] / len(available_gpus))
    keras.backend.tensorflow_backend.set_session(
        tensorflow.Session(config=config))

    notebook['is_online'] = True

    # load created model
    model = keras.models.model_from_json(notebook['model'])

    # compile with client-sent hyperparameters
    model.compile(loss=data['hyperparameters']['loss'],
                  optimizer=keras.optimizers.SGD(
                      lr=float(data['hyperparameters']['learning_rate']),
                      momentum=float(data['hyperparameters']['momentum']),
                      nesterov=bool(data['hyperparameters']['nesterov'])),
                  metrics=['acc'])

    # Training starts
    model.fit(x=notebook['x_train'],
              y=notebook['y_train'],
              batch_size=128,
              validation_data=(notebook['x_test'], notebook['y_test']),
              epochs=int(data['hyperparameters']['epochs']),
              callbacks=[on_epoch_end_callback(notebook=notebook)])

    # save the model separately, as keras model weights cannot be pickled
    model.save("NOTEBOOK_" + data['notebook_name'] +
               "_neural_network_model.hdf5")

    notebook['model'] = model.to_json()
    set_notebook_data(data['notebook_name'])

    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({
        "message": "Success",
        "comment": "Compiled model and trained"
    })
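on_epoch_end_callback() is not defined in these excerpts. Given the history dict initialised at the top of this handler and the _epoch_done flag used by the server-sent-events notifier, a plausible sketch built on keras.callbacks.LambdaCallback:

import keras

def on_epoch_end_callback(notebook):
    def _record(epoch, logs):
        # append this epoch's metrics so the UI can poll training progress
        for key in notebook["history"]:
            notebook["history"][key].append(logs.get(key))
        # flag polled by the server-sent-events notifier
        notebook['_epoch_done'] = True
    return keras.callbacks.LambdaCallback(on_epoch_end=_record)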
Code example #10
def preprocessing():

    data = request.json

    notebook = get_notebook_data(data['notebook_name'])

    def _preprocess(my_json, X):
        # import just the required class from the specified sklearn module
        module = importlib.import_module('sklearn.' + my_json['module'])
        _class = getattr(module, my_json['class'])
        try:
            _X = _class(**my_json['hyperparameters']).fit_transform(X)
        except Exception:
            # fall back to fit() for estimators without fit_transform
            # (note: fit() returns the fitted estimator, not transformed data)
            _X = _class(**my_json['hyperparameters']).fit(X)
        return _X

    # use preprocessed data if it exists, otherwise the raw data
    X = notebook.get('x_preprocessed', notebook['x_raw'])

    if data['model_parameters']['module'] in PREPROC:
        _X = _preprocess(data['model_parameters'], X)
        notebook['x_preprocessed'] = _X
        notebook['preprocessing_applied'] = data['model_parameters']['class']
        notebook['has_columns'] = data['has_columns']
        notebook['uploaded_file_type'] = data['uploaded_file_type']

    set_notebook_data(data['notebook_name'])

    return json_encoder.encode({
        "message": "Success",
        "comment": "Preprocessor applied"
    })
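PREPROC is referenced but never defined in these excerpts; it evidently whitelists the sklearn modules whose transformers the UI may apply. A hypothetical definition (the module names are illustrative; the real whitelist may differ):

PREPROC = {'preprocessing', 'decomposition', 'feature_selection'}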
Code example #11
def create_sequential_model():
    data = request.json

    notebook = get_notebook_data(data['notebook_name'])

    notebook['model_type'] = "NEURAL NETWORK"

    # boolean for server sent events notifier
    notebook['_epoch_done'] = False

    notebook['numLayers'] = data['numLayers']
    layers = data['layers']
    notebook['modelLayers'] = layers

    # get the input shape for the first layer
    input_shape = (notebook['x_preprocessed'].shape[1:]
                   if 'x_preprocessed' in notebook else
                   notebook['x_raw'].shape[1:])

    # using keras sequential API
    model = keras.Sequential()

    # use eval to turn the client-sent JSON layer descriptions into
    # keras.layers constructor calls, then add the layers to the model
    # (eval on client input is unsafe; an eval-free sketch follows this example)

    # special case to include input shape
    model.add(
        eval("keras.layers." + layers[1]['layerType'] + "(" + ",".join([
            str(dct["name"]) + "=" + (str(dct["defaultValue"]) if str_isfloat(
                dct["defaultValue"]) else "'" + dct["defaultValue"] + "'")
            for dct in layers[1]['defaultOptions']
            if dct["defaultValue"] not in {True, False, None}
        ] + ["input_shape=" + str(input_shape)]) + ")"))

    for layer in layers[2:]:
        model.add(
            eval("keras.layers." + layer['layerType'] + "(" + ",".join([
                str(dct["name"]) + "=" +
                (str(dct["defaultValue"]) if str_isfloat(dct["defaultValue"])
                 else "'" + dct["defaultValue"] + "'")
                for dct in layer['defaultOptions']
                if dct["defaultValue"] not in {True, False, None}
            ]) + ")"))

    model.summary()

    # store model in json format
    notebook['model'] = model.to_json()
    set_notebook_data(notebook['notebook_name'])

    # clear the graph to avoid errors
    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({
        "message": "Success",
        "comment": "Model Created!"
    })
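Because eval() executes client-supplied strings, an eval-free construction is safer. A sketch that builds the same keyword arguments with getattr, assuming the layer JSON structure shown above and the str_isfloat() helper the handler already uses (build_layer is a hypothetical name):

def build_layer(layer_json, input_shape=None):
    kwargs = {}
    for dct in layer_json['defaultOptions']:
        value = dct['defaultValue']
        if value in (True, False, None):
            continue
        name = str(dct['name'])
        if str(value).isdigit():
            kwargs[name] = int(value)    # e.g. units=128
        elif str_isfloat(str(value)):
            kwargs[name] = float(value)  # e.g. rate=0.5
        else:
            kwargs[name] = value         # e.g. activation='relu'
    if input_shape is not None:
        kwargs['input_shape'] = input_shape
    # look the layer class up by name instead of eval-ing a constructor string
    return getattr(keras.layers, layer_json['layerType'])(**kwargs)

With this helper, model.add(build_layer(layers[1], input_shape=input_shape)) would replace the first eval call, and the loop body would become model.add(build_layer(layer)).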
Code example #12
def get_user_notebooks(get_user_notebooks_json):
    get_user_notebooks_dict = json_decoder.decode(get_user_notebooks_json)

    # open the user table and return the list of notebooks created by that user
    table = pickle.load(open("USERTABLE", "rb"))

    print("table", table, get_user_notebooks_dict)

    for obj in table:
        if obj['username'] == get_user_notebooks_dict['username']:
            print("username match")
            # 'created_notebooks' matches the key set up in add_user()
            return json_encoder.encode({
                "message": "Success",
                "notebook_names": obj['created_notebooks']
            })

    return json_encoder.encode({"message": "Failure"})
Code example #13
def upload_raw():

    # Check if the file has been uploaded successfully
    if 'file' not in request.files:
        return json_encoder.encode({
            "message": "Failure",
            "comment": "No file received"
        })

    file = request.files['file']

    # Check if there is a file
    if file.filename == '':
        return json_encoder.encode({
            "message": "Failure",
            "comment": "No file selected"
        })

    # Check for file types
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))

        # extract raw binary data
        rawObject = RawFile(
            os.path.join(app.config['UPLOAD_FOLDER'], filename), {
                'features': [0],
                'labels': [1]
            }, False)
        X, Y = rawObject.extract()

        # multipart uploads carry their fields in form data, not a JSON body
        # (cf. upload_table above)
        data = request.form
        notebook = get_notebook_data(data['notebook_name'])

        # store dataset in notebook
        notebook['x_raw'] = X
        notebook['y_raw'] = Y

        set_notebook_data(data['notebook_name'])

    return json_encoder.encode({
        "message": "Success",
        "comment": "Data loaded successfully"
    })
Code example #14
def load_existing_notebook_details(notebook_details):
    # Send data to UI to load back existing notebook
    print(notebook_details)
    data = json_decoder.decode(notebook_details)
    notebook = get_notebook_data(data['notebook_name'])
    dct = dict(notebook)

    return json_encoder.encode({"message": "Success", "notebook_data": dct})
Code example #15
def pick_tool(vulnerabilities_json):
    print("here")
    status_code = -1
    status_message = 'Error'
    return_data = ""
    print(vulnerabilities_json)
    # vulnerabilities_dict = json_encoder.encode(vulnerabilities_json)
    vulnerabilities_dict = json_decoder.decode(vulnerabilities_json)
    vulnerabilities = vulnerabilities_dict["vulnerabilities"]

    username = vulnerabilities_dict["username"]
    print("\nusername: "******"\n")

    url = vulnerabilities_dict["url"]
    url = url.strip('\"')
    print("\n " + vulnerabilities_dict["url"] + "\n")

    if "ssl" in vulnerabilities:
        url = url.split('//')
        if url[0] == 'https:' or url[0] == 'http:':
            url.pop(0)
        url_modified = ''.join(map(str, url))
        url_modified = url_modified.split('/')
        url_modified_stripped = url_modified[0]
        print(url_modified_stripped)
        ss = VulnerabilityScanTools()
        return_data += ss.sslyzer_scan("www.pes.edu:443", "full")
    elif "xsser" in vulnerabilities:
        ss = VulnerabilityScanTools()
        return_data += ss.xsser_scan("https://hack.me/")
    elif "nikto" in vulnerabilities:
        ss = VulnerabilityScanTools()
        return_data += ss.nikto_scan("www.isanalytics.com")

    print("Notebook name", vulnerabilities_dict["notebook_name"])

    # update the notebook's saved data with the scan results
    fileObject = open("NOTEBOOK_" + vulnerabilities_dict["notebook_name"],
                      "rb")
    table = pickle.load(fileObject)
    print(type(table))
    fileObject.close()

    table["url"] = vulnerabilities_dict["url"]
    table["vulnerabilities"] = vulnerabilities_dict["vulnerabilities"]
    table["report"] = return_data

    fileObject = open("NOTEBOOK_" + vulnerabilities_dict["notebook_name"],
                      "wb")
    pickle.dump(table, fileObject)
    fileObject.close()

    print("return data", return_data)
    return json_encoder.encode({"message": "Success", "report": return_data})
Code example #16
def get_roc_curve(notebook_name_json):
    notebook_name_dict = json_decoder.decode(notebook_name_json)

    notebook = get_notebook_data(notebook_name_dict['notebook_name'])

    # run predictions on the test data

    # to invoke keras predict
    if notebook['model_type'] == "NEURAL NETWORK":
        model = keras.models.load_model("NOTEBOOK_" +
                                        notebook_name_dict['notebook_name'] +
                                        "_neural_network_model.hdf5")
        probs = model.predict(notebook['x_test'])

    # to invoke sklearn predict
    else:
        probs = notebook['model'].predict_proba(notebook['x_test'])

    preds = probs[:, 1]
    fpr, tpr, threshold = roc_curve(notebook['y_test'], preds)
    roc_auc = auc(fpr, tpr)

    # Create ROC plot
    plt.title('Receiver Operating Characteristic')
    plt.plot(fpr, tpr, 'b', label='AUC = %0.2f' % roc_auc)
    plt.legend(loc='lower right')
    plt.plot([0, 1], [0, 1], 'r--')
    plt.xlim([0, 1])
    plt.ylim([0, 1])
    plt.ylabel('True Positive Rate')
    plt.xlabel('False Positive Rate')

    # Save plot to be used by vue.js
    filename = "NOTEBOOK_" + notebook['notebook_name'] + "_roc_curve.jpg"
    plt.savefig("../UI/src/assets/" + filename)

    plt.clf()

    # Save file name in notebook
    notebook['roc_curve'] = filename

    set_notebook_data(notebook_name_dict['notebook_name'])

    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({"message": "Success", "roc_curve": filename})
Code example #17
def get_devices():
    # psutil.cpu_count() is portable; calling it unconditionally avoids
    # leaving n_cpu unbound on non-posix systems
    n_cpu = psutil.cpu_count()

    # if new installation, create a global notebooks table
    if not (notebook_global_table_exist()):
        create_notebook_global_table()

    table = pickle.load(open("NOTEBOOKS_DATA", "rb"))
    a_cpu = n_cpu - sum(obj["CPU_count"] for obj in table)

    return json_encoder.encode({"message": "Success", "CPU_available": a_cpu})
Code example #18
def load_existing_notebook():
    # Send data to UI to load back existing notebook

    data = request.json
    notebook = get_notebook_data(data['notebook_name'])

    dct = {}
    for key in notebook:
        # no need to send the model and other array-typed data
        if key not in {
                'model', 'x_raw', 'x_preprocessed', 'x_test', 'x_train',
                'y_test', 'y_train', '_model', 'y_raw', 'confusion_matrix'
        }:
            dct[key] = notebook[key]

    return json_encoder.encode({"message": "Success", "notebook_data": dct})
Code example #19
    def explain_instance_tabular_data(instance):
        newshape = numpy.prod(instance.shape)

        if notebook['model_type'] == "NEURAL NETWORK":
            model = keras.models.load_model(
                "NOTEBOOK_" + notebook_name_dict['notebook_name'] +
                "_neural_network_model.hdf5")
            target = list(
                map(
                    numpy.argmax,
                    model.predict(
                        numpy.reshape(instance,
                                      newshape=(1, *instance.shape)))[0]))[0]
        else:
            target = notebook['model'].predict([instance])[0]

        explainer = lt.LimeTabularExplainer(
            training_data=notebook['x_train'],
            feature_names=[str(i) for i in range(len(instance))])
        exp = explainer.explain_instance(instance,
                                         predict_fn,
                                         num_features=len(instance),
                                         num_samples=min(
                                             len(notebook['x_train']), 100),
                                         labels=(target, ))
        exp.as_pyplot_figure(label=target).savefig(
            "../UI/src/assets/" + "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance1.jpg",
            figsize=(50, 50))
        exp.save_to_file(file_path="../UI/src/assets/" + "NOTEBOOK_" +
                         notebook['notebook_name'] +
                         "_investigate_model_instance.html")
        notebook['explanation'] = "NOTEBOOK_" + notebook[
            'notebook_name'] + "_investigate_model_instance.html"

        set_notebook_data(notebook_name_dict['notebook_name'])

        try:
            keras.backend.clear_session()
        except:
            pass

        return json_encoder.encode({
            'explanation':
            "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance.html"
        })
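predict_fn is a closure from the enclosing (unshown) function. LIME expects a function that maps a batch of instances to class probabilities; a sketch consistent with the model_type convention used in these handlers (make_predict_fn is a hypothetical name):

def make_predict_fn(notebook, model=None):
    def predict_fn(instances):
        if notebook['model_type'] == "NEURAL NETWORK":
            # keras models return per-class probabilities directly
            return model.predict(instances)
        # sklearn classifiers expose probabilities via predict_proba
        return notebook['model'].predict_proba(instances)
    return predict_fn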
Code example #20
def get_confusion_matrix(notebook_name_json):
    notebook_name_dict = json_decoder.decode(notebook_name_json)

    notebook = get_notebook_data(notebook_name_dict['notebook_name'])

    # to invoke keras predict
    if notebook['model_type'] == "NEURAL NETWORK":
        model = keras.models.load_model("NOTEBOOK_" +
                                        notebook_name_dict['notebook_name'] +
                                        "_neural_network_model.hdf5")
        prediction = model.predict(notebook['x_test'])
        prediction = numpy.array(list(map(numpy.argmax, prediction)))

    # to invoke sklearn predict
    else:
        model = notebook['model']
        prediction = model.predict(notebook['x_test'])

    # de-"one hot"
    y_test = notebook['y_test']
    if len(y_test.shape) > 1:
        y_test = numpy.array(list(map(numpy.argmax, y_test)))

    # use the de-one-hotted labels computed above
    matrix = confusion_matrix(y_test, prediction).ravel()

    # save the confusion matrix in the notebook
    # (the four-cell breakdown below assumes binary classification)
    notebook['confusion_matrix'] = matrix
    notebook['true_negative'] = int(matrix[0])
    notebook['false_positive'] = int(matrix[1])
    notebook['false_negative'] = int(matrix[2])
    notebook['true_positive'] = int(matrix[3])

    set_notebook_data(notebook_name_dict['notebook_name'])

    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({
        "message": "Success",
        "confusion_matrix": matrix.tolist()
    })
Code example #21
def get_devices():
    # Device discovery: psutil/GPUtil on unix, tensorflow's device_lib elsewhere

    # Check for unix
    if (os.name == 'posix'):

        # get the GPU count if available
        try:
            n_gpu = len(GPUtil.getGPUs())
        except:
            n_gpu = 0

        # get CPU count
        n_cpu = psutil.cpu_count()

    # For windows, use tensorflow to get details
    else:
        local_devices = device_lib.list_local_devices()
        n_gpu = len([x.name for x in local_devices if x.device_type == 'GPU'])
        n_cpu = psutil.cpu_count()

    # if new installation, create a global notebooks table

    if not (notebook_global_table_exist()):
        create_notebook_global_table()

    table = pickle.load(open("NOTEBOOKS_DATA", "rb"))

    # Send number of CPUs and GPUs currently available

    a_gpu = n_gpu - sum(obj["GPU_count"] for obj in table if obj['is_online'])
    a_cpu = n_cpu - sum(obj["CPU_count"] for obj in table if obj['is_online'])

    return json_encoder.encode({
        "message": "Success",
        "GPU_count": n_gpu,
        "CPU_count": n_cpu,
        "GPU_available": a_gpu,
        "CPU_available": a_cpu
    })
Code example #22
def set_train_test_data():

    data = request.json
    notebook = get_notebook_data(data['notebook_name'])

    # use preprocessed data if it exists, otherwise the raw data
    X = notebook.get('x_preprocessed', notebook['x_raw'])

    notebook['hyperparameters'] = {}
    notebook['hyperparameters']['test_size'] = data['test_size']

    # split the dataset into train/test parts and store them in the notebook
    (notebook['x_train'], notebook['x_test'], notebook['y_train'],
     notebook['y_test']) = train_test_split(X,
                                            notebook['y_raw'],
                                            test_size=data['test_size'])

    set_notebook_data(data['notebook_name'])

    return json_encoder.encode({"message": "Success", "comment": "Data set"})
Code example #23
def get_accuracy(notebook_name_json):
    notebook_name_dict = json_decoder.decode(notebook_name_json)

    notebook = get_notebook_data(notebook_name_dict['notebook_name'])

    # to invoke keras predict
    if notebook['model_type'] == "NEURAL NETWORK":
        model = keras.models.load_model("NOTEBOOK_" +
                                        notebook_name_dict['notebook_name'] +
                                        "_neural_network_model.hdf5")
        prediction = model.predict(notebook['x_test'])
        prediction = numpy.array(list(map(numpy.argmax, prediction)))

    # to invoke sklearn predict
    else:
        model = notebook['model']
        prediction = model.predict(notebook['x_test'])

    # de-"one hot"
    y_test = notebook['y_test']
    if len(y_test.shape) > 1:
        y_test = numpy.array(list(map(numpy.argmax, y_test)))

    # use the de-one-hotted labels computed above
    accuracy = accuracy_score(y_test, prediction)

    # save accuracy in notebook
    notebook['accuracy'] = accuracy

    set_notebook_data(notebook_name_dict['notebook_name'])

    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({
        "message": "Success",
        "accuracy": str(accuracy)
    })
Code example #24
def pick_tool():
    print("here")
    status_code = -1
    status_message = 'Error'
    return_data = ""
    vulnerabilities_json = request.json

    print(vulnerabilities_json)
    # vulnerabilities_dict = json_encoder.encode(vulnerabilities_json)
    vulnerabilities_dict = vulnerabilities_json
    vulnerabilities = vulnerabilities_dict["vulnerabilities"]

    username = vulnerabilities_dict["username"]
    print("\nusername: "******"\n")

    url = vulnerabilities_dict["url"]
    #url = url.strip('\"')
    print("\n " + vulnerabilities_dict["url"] + "\n")
    ss = VulnerabilityScanTools()

    if "ssl" in vulnerabilities:
        return_data += ss.sslyzer_scan(vulnerabilities_dict["url"], "full")
    elif "xss" in vulnerabilities:
        return_data += ss.xsser_scan(vulnerabilities_dict["url"])
    elif "arachni" in vulnerabilities:
        return_data += ss.arachni_scan(vulnerabilities_dict["url"])
    elif "nikto" in vulnerabilities:
        return_data += ss.nikto_scan(vulnerabilities_dict["url"])
    elif "genscan" in vulnerabilities:
        return_data += ss.rapidscan_scan(vulnerabilities_dict["url"])
    elif "form-security" in vulnerabilities:
        return_data += ss.w3af_scan(url, "form-security")
    elif "clickjacking" in vulnerabilities:
        return_data += ss.w3af_scan(url, "clickjacking")
    elif "backdoor-info" in vulnerabilities:
        return_data += ss.w3af_scan(url, "backdoor-info")
    elif "cookies" in vulnerabilities:
        return_data += ss.w3af_scan(url, "cookies")
    elif "server-info" in vulnerabilities:
        return_data += ss.w3af_scan(url, "server-info")
    elif "allowed-methods" in vulnerabilities:
        return_data += ss.w3af_scan(url, "allowed-methods")
    elif "gsb" in vulnerabilities:
        return_data += ss.gsb_scan(url)
    elif "netcraft" in vulnerabilities:
        return_data = str(ss.netcraft(url))

    print("Notebook name", vulnerabilities_dict["notebook_name"])
    # update the notebook's saved data with the scan results
    fileObject = open("NOTEBOOK_" + vulnerabilities_dict["notebook_name"],
                      "rb")
    table = pickle.load(fileObject)
    print(type(table))
    fileObject.close()

    table["url"] = vulnerabilities_dict["url"]
    table["vulnerabilities"] = vulnerabilities_dict["vulnerabilities"]
    table["report"] = return_data

    fileObject = open("NOTEBOOK_" + vulnerabilities_dict["notebook_name"],
                      "wb")
    pickle.dump(table, fileObject)
    fileObject.close()

    print("return data", return_data)
    return json_encoder.encode({"message": "Success", "report": return_data})
Code example #25
    def explain_instance_image_data(instance):

        newshape = numpy.prod(instance.shape)

        if notebook['model_type'] == "NEURAL NETWORK":
            model = keras.models.load_model(
                "NOTEBOOK_" + notebook_name_dict['notebook_name'] +
                "_neural_network_model.hdf5")
            target = list(
                map(
                    numpy.argmax,
                    model.predict(
                        numpy.reshape(instance,
                                      newshape=(1, *instance.shape)))[0]))[0]
        else:
            target = notebook['model'].predict(
                numpy.reshape(instance, newshape=newshape))

        plt.savefig("../UI/src/assets/" + "NOTEBOOK_" +
                    notebook['notebook_name'] +
                    "_investigate_model_instance0.jpg")
        plt.clf()

        explainer = lt.LimeTabularExplainer(
            training_data=notebook['x_train'],
            feature_names=[str(i) for i in range(len(instance))]
            if 'column_names' not in notebook else notebook['column_names'])
        exp = explainer.explain_instance(instance,
                                         predict_fn,
                                         num_features=len(instance),
                                         num_samples=newshape,
                                         labels=(target, ))
        exp.as_pyplot_figure(label=target).savefig(
            "../UI/src/assets/" + "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance1.jpg",
            figsize=(50, 50))
        exp.save_to_file(file_path="../UI/src/assets/" + "NOTEBOOK_" +
                         notebook['notebook_name'] +
                         "_investigate_model_instance.html")

        explanation_list = exp.as_map()[target]
        constructed_image = numpy.zeros(shape=len(explanation_list))

        # map each feature's weight into [0, 1] for display
        for i, j in explanation_list:
            constructed_image[i] = 0.5 + j / 2

        constructed_image = numpy.reshape(constructed_image,
                                          newshape=instance.shape)
        # draw the reconstructed explanation before saving; without this,
        # savefig would write out an empty figure
        plt.imshow(constructed_image)
        plt.savefig("../UI/src/assets/" + "NOTEBOOK_" +
                    notebook['notebook_name'] +
                    "_investigate_model_instance2.jpg")
        plt.clf()
        notebook['explanation'] = "NOTEBOOK_" + notebook[
            'notebook_name'] + "_investigate_model_instance.html"
        set_notebook_data(notebook_name_dict['notebook_name'])

        try:
            keras.backend.clear_session()
        except:
            pass

        return json_encoder.encode({
            'instance':
            "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance0.jpg",
            'explanation':
            "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance.html",
            'constructed':
            "NOTEBOOK_" + notebook['notebook_name'] +
            "_investigate_model_instance2.jpg"
        })
Code example #26
def create_non_neural_network_model():

    data = request.json

    notebook = get_notebook_data(data['notebook_name'])

    notebook['hyperparameters'] = data['model_parameters']
    notebook['model_type'] = "NON NEURAL NETWORK"
    notebook['model_name'] = data['model_parameters']['class']

    # cast the string-typed parameters to their required data types
    hp = data['model_parameters']['hyperparameters']
    for key in hp:
        try:
            if hp[key] == 'None':
                hp[key] = None
            elif hp[key].isdigit():
                hp[key] = int(hp[key])
            elif str_isfloat(hp[key]):
                hp[key] = float(hp[key])
            elif hp[key] == 'True':
                hp[key] = True
            elif hp[key] == 'False':
                hp[key] = False
        except Exception:
            # non-string values are left untouched
            pass

    # allocate CPUs if possible
    if 'n_jobs' in data['model_parameters']['hyperparameters']:
        data['model_parameters']['hyperparameters']['n_jobs'] = notebook[
            'CPU_count']

    notebook['is_online'] = True

    def train_supervised(x_train, y_train, my_json):

        # import just the required class from specific module and train the model

        module = importlib.import_module('sklearn.' + my_json['module'])
        _class = getattr(module, my_json['class'])
        model = _class(**my_json['hyperparameters'])
        model.fit(x_train, y_train)

        # deallocate devices after training

        notebook['is_online'] = False

        return model

    def train_unsupervised(X, my_json):

        # import just the required class from specific module and train the model

        module = importlib.import_module('sklearn.' + my_json['module'])
        _class = getattr(module, my_json['class'])
        model = _class(**my_json['hyperparameters'])

        try:
            model.fit_transform(X)
        except Exception:
            # some estimators only implement fit()
            model.fit(X)

        # deallocate devices after training

        notebook['is_online'] = False

        return model

    # train supervised and unsupervised algorithms separately

    # supervised algorithms need a 1-D array of class labels for Y
    if data['model_parameters']['module'] in SUPER:
        print("\n", numpy.array(list(map(numpy.argmax, notebook['y_train']))),
              "\n")
        # sklearn expects 1-D labels; de-one-hot 2-D targets via argmax
        y_train = (notebook['y_train']
                   if len(notebook['y_train'].shape) <= 1 else
                   numpy.array(list(map(numpy.argmax, notebook['y_train']))))
        notebook['model'] = train_supervised(notebook['x_train'], y_train,
                                             data['model_parameters'])

    # unsupervised algorithms train on the X samples only
    elif data['model_parameters']['module'] in UNSUPER:
        notebook['model'] = train_unsupervised(notebook['x_train'],
                                               data['model_parameters'])

    set_notebook_data(data['notebook_name'])

    return json_encoder.encode({
        "message": "Success",
        "comment": "Model trained"
    })
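str_isfloat() is used here and in create_sequential_model() but is not defined in these excerpts; a minimal sketch:

def str_isfloat(s):
    # True when the string parses as a float (e.g. "0.5", "1e-3")
    try:
        float(s)
        return True
    except (TypeError, ValueError):
        return False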
Code example #27
def get_precision_recall_curve(notebook_name_json):
    notebook_name_dict = json_decoder.decode(notebook_name_json)

    notebook = get_notebook_data(notebook_name_dict['notebook_name'])

    # to invoke keras predict
    if notebook['model_type'] == "NEURAL NETWORK":
        model = keras.models.load_model("NOTEBOOK_" +
                                        notebook_name_dict['notebook_name'] +
                                        "_neural_network_model.hdf5")
        prediction = model.predict(notebook['x_test'])
        prediction = numpy.array(list(map(numpy.argmax, prediction)))

    # to invoke sklearn predict
    else:
        model = notebook['model']
        prediction = model.predict(notebook['x_test'])

    # note: hard class predictions are used as the curve's "scores"
    y_score = prediction

    # de-"one hot"
    y_test = notebook['y_test']
    if len(y_test.shape) > 1:
        y_test = numpy.array(list(map(numpy.argmax, y_test)))

    # average precision is computed for the binary case only
    # (iterate per class to extend this to multiclass)
    # binarise the labels: minimum class -> 0, everything else -> 1
    y_test[y_test == y_test.min()] = 0
    y_test[y_test != 0] = 1

    average_precision = average_precision_score(y_test, y_score)
    precision, recall, _ = precision_recall_curve(y_test, y_score)

    # create precision recall curve
    plt.step(recall, precision, color='b', alpha=0.2, where='post')
    plt.fill_between(recall, precision, step='post', alpha=0.2, color='b')
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title('2-class Precision-Recall curve: AP={0:0.2f}'.format(
        average_precision))

    # save the plot and record its filename in the notebook for reloading
    filename = ("NOTEBOOK_" + notebook['notebook_name'] +
                "_precision_recall_curve.jpg")
    plt.savefig("../UI/src/assets/" + filename)
    plt.clf()
    notebook['precision_recall_curve'] = filename
    notebook['average_precision_score'] = average_precision
    notebook['recall'] = recall.mean()

    set_notebook_data(notebook_name_dict['notebook_name'])

    try:
        keras.backend.clear_session()
    except:
        pass

    return json_encoder.encode({
        "message": "Success",
        "precision_recall_curve": filename,
        "average_precision_score": average_precision,
        "recall": recall.mean()
    })