def fit(self, X, Y):
     self.X = X
     self.Y = Y
     self.num = X.shape[0]
     self.num_features = X.shape[1]
     self.random_init()
     l = []
     for i in range(self.iterations):
         l.append(self.cost(X, Y, self.a, self.b))
         self.update_weight(self.a, self.b, self.X, self.Y)
     l.append(self.cost(self.X, self.Y, self.a, self.b))
     return l
 def fit(self, X, Y):
     self.X = X
     self.Y = Y
     self.num = X.shape[0]
     self.num_features = X.shape[1]
     self.random_init()
     l = []
     ga, gb = [], []
     wa, wb = [], []
     for i in range(self.iterations):
         l.append(self.cost(X, Y, self.a, self.b))
         (w1, w2) = self.update_weight(self.a, self.b, self.X, self.Y)
         ga.append(w1), gb.append(w2), wa.append(self.a), wb.append(self.b)
     l.append(self.cost(self.X, self.Y, self.a, self.b))
     return l, ga, gb, wa, wb
示例#3
0
def sentence_encoder(messages=None):
    """Encode *messages* with the TF-Hub sentence encoder and write the
    embeddings to a text file.

    Each output line holds the first 13 characters of the message followed
    by the first 3 embedding dimensions.  Returns the error count produced
    by embed_tfhub() (0 on the happy path).
    """
    # BUG FIX: `messages=[]` was a mutable default shared across calls;
    # use the None sentinel instead.
    if messages is None:
        messages = []
    error_count = 0
    error_count, embed = embed_tfhub()

    # Reduce logging output.
    tf.logging.set_verbosity(tf.logging.ERROR)

    with tf.Session() as session:
        session.run(
            [tf.global_variables_initializer(),
             tf.tables_initializer()])
        log.append(error_count,
                   "Loading data from : " + str(conf.cleaned_alert_data))
        message_embeddings = session.run(embed(messages))
        # Mode "w" truncates on open — replaces the original's
        # open().close() followed by a reopen in append mode.
        text_file = open("./data/" + conf.encoded_alert_msg, "w")
        log.append(
            error_count, "Writing message and encoding data to text file : " +
            str(conf.encoded_alert_msg))
        try:
            for i, message_embedding in enumerate(
                    np.array(message_embeddings).tolist()):
                message_embedding_snippet = ", ".join(
                    (str(x) for x in message_embedding[:3]))
                text_file.write(
                    format(messages[i])[0:13] + ", " +
                    format(message_embedding_snippet) + "\n")
        finally:
            # Close even if a write fails — the original leaked the handle
            # on any exception in the loop.
            text_file.close()
        log.append(
            error_count,
            "Writing complete to text file : " + str(conf.encoded_alert_msg))
    return error_count
示例#4
0
def scatter_3D_plot(messages,
                    title="cartesian 3D plot",
                    fname="scatter_plot_3D.png"):
    """Plot rows of (label, x, y, z) as a 3-D scatter saved to ./plots/<fname>.

    Each row is read positionally: column 0 is a label, columns 1-3 are the
    coordinates.  Returns the error count (always 0 here).
    NOTE(review): the `title` parameter is accepted but the plot title is
    hard-coded below — preserved as-is for backward compatibility.
    """
    # Imported lazily; registering the 3-D projection is its side effect.
    from mpl_toolkits.mplot3d import Axes3D
    error_count = 0
    log.append(error_count, "Invoking a new figure (png) to store the plot.")
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')

    xs = []
    ys = []
    zs = []
    # NOTE(review): only two colors for N points — matplotlib may reject
    # this when len(xs) != 2; confirm against the callers' data.
    c = ['r', 'b']
    m = 'o'

    log.append(
        error_count,
        "Parsing the row to assign data elements for axese: x, y, z, and l (label)"
    )
    for row in messages:
        # Slices tolerate short rows (yield "" instead of raising).
        l = ''.join(map(str, row[0:1]))
        x = ''.join(map(str, row[1:2]))
        y = ''.join(map(str, row[2:3]))
        z = ''.join(map(str, row[3:4]))
        # %-formatting keeps output identical under Python 2 and 3
        # (the original used the Python-2-only `print l, x, y, z`).
        print("%s %s %s %s" % (l, x, y, z))
        # keep float() else throws an error
        xs.append(float(x))
        ys.append(float(y))
        zs.append(float(z))
    log.append(error_count,
               "Assign x, y, z values, axis labels, and title to the plot.")
    ax.scatter(xs, ys, zs, c=c, marker=m)

    # NOTE(review): min()/max() raise on empty input — assumes at least one row.
    ax.set_xlim3d(min(xs), max(xs))
    ax.set_ylim3d(min(ys), max(ys))
    ax.set_zlim3d(min(zs), max(zs))
    ax.set_xlabel('X axis')
    ax.set_ylabel('Y axis')
    ax.set_zlabel('Z axis')
    ax.set_title("CAP message sentence encoding plot")

    log.append(error_count, "Saving pplot to file ./plots/" + str(fname))
    # BUG FIX: the kwarg was misspelled "bbox_inces", so the tight bounding
    # box was never applied by savefig.
    plt.savefig("./plots/" + fname, dpi=300, bbox_inches='tight')
    #plt.show()
    return error_count
示例#5
0
文件: main.py 项目: waidyanatha/IAN
def initiatlize():
    """Log the runtime environment and ensure ./plots/ and ./data/ exist.

    Returns (error_count, "done").  The misspelled name is kept because
    the main script below calls `initiatlize()`.
    """
    init_err_cnt = 0
    log.append(init_err_cnt, "system version " + str(sys.version))
    log.append(init_err_cnt, "tesnorflow verion " + str(tf.VERSION))
    log.append(init_err_cnt, "keras version" + str(tf.keras.__version__))
    from tensorflow.python.client import device_lib
    # print(...) with a single argument behaves the same under Python 2
    # and 3 (the original used a Python-2 print statement).
    print(device_lib.list_local_devices())
    #
    # Create the working directories if they are missing — the original
    # duplicated this stanza verbatim for each directory, and passed a
    # literal 0 instead of init_err_cnt (same value, now consistent).
    for dir_path in ("./plots/", "./data/"):
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
            log.append(
                init_err_cnt,
                "directory path does not exists, creating new directory " +
                str(dir_path))
    #
    # check parameters and echo call functionaX
    log.append(
        init_err_cnt,
        "initialization done returning error count " + str(init_err_cnt))
    return init_err_cnt, "done"
示例#6
0
文件: main.py 项目: waidyanatha/IAN
    # check parameters and echo call functionaX
    log.append(
        0, "initialization done returning error count " + str(init_err_cnt))
    return init_err_cnt, "done"


#
######################################################################################
#
#    MAIN CALLS
#
######################################################################################
# Script entry point: reset the global error counter and record the start
# time, then announce the run on stdout and in the log.
error_count = 0
tstart = dt.datetime.now()
print(str(tstart) + ": begin message text classification with TF and NLP ")
log.append(error_count, "begin message text classification with TF and NLP ")
#
#######################################################
# Initialize & prerequisits
#######################################################
if error_count == 0:
    log.append(error_count, "starting process initializing algorithms. ")
    init_err, init_str = initiatlize()
    if init_err > 0:
        error_count += init_err
        log.append(
            error_count, "initialization failed with error count: " +
            str(error_count) + " value : " + str(init_str))
    else:
        log.append(
            error_count, "initializing completed with error count: " +
示例#7
0
def load_data_from_source():
    """Read ./data/<conf.alerts_file> as CSV, clean each data row, and write
    the cleaned rows (quoted, one per line) to ./data/<conf.cleaned_alert_data>.

    The first row is treated as a header: it is logged but not written.
    Returns (error_count, conf.cleaned_alert_data).
    """
    error_count = 0
    log.append(error_count,
               "Fetching data file ./data/" + str(conf.alerts_file))
    if not os.path.exists("./data/" + str(conf.alerts_file)):
        error_count += 1
        log.append(
            error_count, "./data/" + conf.alerts_file +
            " does not exist. Error count: " + str(error_count))
    else:
        log.append(error_count,
                   "loading data from: ./data/" + conf.alerts_file)
        with open("./data/" + str(conf.alerts_file)) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            line_count = 0
            alert_list = []
            # `with` guarantees the output file is closed even if a write
            # raises — the original leaked the handle on error.
            with open("./data/" + conf.cleaned_alert_data, "w") as text_file:
                for row in csv_reader:
                    # skip the header
                    if line_count == 0:
                        log.append(error_count,
                                   "number of attributes: " + str(len(row)))
                        log.append(error_count, ', '.join(row))
                    else:
                        # Columns 0-7 then 8-15; the double space between
                        # column 7 and 8 reproduces the original concatenation.
                        tmp_str = (" ".join(row[0:8]) + "  " +
                                   " ".join(row[8:16]))
                        tmp_str = clean_str(tmp_str)
                        # BUG FIX: `alert_list[line_count:0] = tmp_str`
                        # spliced the string in character by character;
                        # collect whole cleaned rows instead.
                        alert_list.append(tmp_str)
                        text_file.write('"%s"\n' % tmp_str)
                    line_count += 1

                log.append(
                    error_count, "Processed " + str(line_count) + " rows " +
                    " (1 header row and " + str(line_count - 1) + " data rows).")
            log.append(
                error_count, "finished writing alert data to a file ./data/" +
                conf.cleaned_alert_data)
    return error_count, conf.cleaned_alert_data
示例#8
0
def embed_tfhub():
    """Load the TF-Hub embedding module named by conf.tf_hub_module_url.

    Returns (error_count, module) where error_count is always 0 here.
    """
    error_count = 0
    module_url = conf.tf_hub_module_url
    log.append(error_count, "Tensorflow Hub URL: " + str(module_url))
    # BUG FIX: the original called hub.Module(module_url) twice — once for
    # the log line and again for the return value — building the (large)
    # module graph two times.  Build it once and reuse the instance.
    embed = hub.Module(module_url)
    log.append(error_count, "Embedding: " + str(embed))
    return error_count, embed
示例#9
0
def micLoop():
    """Poll the microphone once over I2C, then re-arm the poll timer."""
    if I2C.getMic():
        # Positive reading: record the event and announce it on stdout.
        log.append('mic')
        print('[' + log.getTimestamp() + '] Microphone: Cockroach Detected')

    # Schedule the next poll half a second from now.
    newThread(0.5, micLoop)
示例#10
0
# Kick off the background sensor loops before entering the camera loop.
pidLoop()
micLoop()
micLedLoop()

try:

    # Main camera loop: grab a frame, run detection, publish the count.
    while True:
        #image bug detection
        #print("threads: " + str(threading.active_count()))
        # Yellow LED is lit only while the camera captures the image.
        I2C.setLed(I2C.YELLOW, True)
        frame_path = detect.saveImage()
        I2C.setLed(I2C.YELLOW, False)
        bugs = (detect.detectMultiple(frame_path)
                if ENABLE_MULTIPLE_BUGS else detect.detectBug(frame_path))
        I2C.setBugCount(bugs)
        if bugs > 0:
            # Detection: log the event, announce it, and show the count
            # when multi-bug detection is enabled.
            log.append('cam')
            print("[" + log.getTimestamp() + "] Image: Cockroach Detected")
            if ENABLE_MULTIPLE_BUGS:
                print('Number of bugs: ' + str(bugs))

except KeyboardInterrupt:
    # Ctrl-C: release the GPIO pins, flush the log, and exit cleanly.
    print('Termination signal received, quitting...')
    GPIO.cleanup()
    log.dump()
    print('Cleanup finished')
    quit(0)