Example #1
def by_infile(self, infile):
    # Clear any previous output; ignore the error if the directory does not exist
    try:
        shutil.rmtree(self.OUTPUT_DIR)
    except OSError:
        pass
    self.db_open()
    json_data = self.get_events_from_infile(infile)
    # build preprocessor
    ppr = Preprocessor()
    # Process raw data
    #X, Y, events_found = ppr.get_raw_data(DIMENSION, [RAW_FILE], bad)
    X, Y, events_found = ppr.get_from_json(self.DIMENSION, json_data)
    X, Y = ppr.remove_outliers(X, Y)
    X, Y = ppr.normalize(X, Y)
    # 0% training / 100% testing split, so every event gets scored
    trX, trY, teX, teY, vaX, vaY = ppr.partition_for_training(
        X, Y, 0.0, 1.0)
    ppr.store_training_partitions(trX, trY, teX, teY, vaX, vaY,
                                  self.INPUT_DIR)
    # build adapter
    adapter = MACAdapter(self.INPUT_DIR, self.DIMENSION, self.FOLDS)
    # build model
    convnet = ConvNet(self.DIMENSION)
    # build server
    server = ConvNetServer(adapter,
                           self.OUTPUT_DIR,
                           batch_size=self.BATCH_SIZE,
                           verbose=True,
                           use=True)
    x, durs, _ = server.get_testing_batch()
    # Restore the trained weights and run inference on the test batch
    with tf.Session() as sess:
        init = tf.global_variables_initializer()
        sess.run(init)
        convnet.restore(sess, self.INITIAL_WEIGHTS)
        predictions = sess.run(convnet.predictor,
                               feed_dict={
                                   convnet.x: x,
                                   convnet.durs: durs
                               })
    # Get event ids
    _, _, ids = adapter.get_ids()
    # One result document per event: probability that the event is an aircraft
    results = [{
        "eventID": int(ids[i]),
        "ml": {
            "aircraftProbability": round(float(predictions[i][0]), 4),
            "model": self.MODEL
        }
    } for i in range(len(ids))]
    for result in results:
        self.insert_result_for_event(result)
    self.db_close()
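
# A minimal usage sketch for by_infile(); "Detector" is an assumed name for
# whatever class defines it, and the input path is illustrative only:
#
#     detector = Detector()
#     detector.by_infile("events.json")   # scores every event in the file and
#                                         # inserts one result per event ID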
if __name__ == '__main__':

    # JSON object returned from api_call;
    # replace this with however you would like it to work in production
    with open(EXAMPLE_FILE) as f:
        json_data = json.load(f)
    # NOTE: if events in the JSON object have neither an "aircraft" nor a "community"
    # field, they will be labeled as community for training - probably best to avoid this
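    # A hedged sketch of one way to avoid that: drop unlabeled events before
    # preprocessing. The top-level "events" key is an assumption about the JSON
    # layout, not something this script defines.
    # json_data["events"] = [e for e in json_data["events"]
    #                        if "aircraft" in e or "community" in e]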

    # build preprocessor
    ppr = Preprocessor()

    # Process raw data
    #X, Y, events_found = ppr.get_raw_data(DIMENSION, [RAW_FILE], bad)
    X, Y, events_found = ppr.get_from_json(DIMENSION, json_data)
    X, Y = ppr.remove_outliers(X, Y)
    X, Y = ppr.normalize(X, Y)
    # Shove all events into the "training" subdirectory
    trX, trY, teX, teY, vaX, vaY = ppr.partition_for_training(X, Y, 1.0, 0.0)
    # Store events in intermediate directory (will be deleted on subsequent trainings)
    ppr.store_training_partitions(trX, trY, teX, teY, vaX, vaY, INPUT_DIR)

    # build adapter
    adapter = MACAdapter(INPUT_DIR, DIMENSION, FOLDS)

    # build model
    convnet = ConvNet(DIMENSION)

    # build server
    server = ConvNetServer(adapter,
                           OUTPUT_DIR,
                           batch_size=BATCH_SIZE,