# Example 1 (score: 0)
def restart():
    """Run the full rebuild pipeline: keyword graph, model, then TF-IDF."""
    # Earlier stages are currently disabled:
    # clear_db()
    # clear_dirs()
    # print("SCRAPING STORIES"); scrape_stories()
    pipeline = (
        ("CONSTRUCTING KEYWORD GRAPH", construct_graph),
        ("CREATING MODEL", create_model),
        ("CREATING TF-IDF", tf_idf),
    )
    for banner, step in pipeline:
        print(banner)
        step()
 def __init__(self, model_name='model'):
     """Load a saved Keras model (architecture + weights) by name.

     Tries to load ``model/<model_name>.json`` (structure) and
     ``model/<model_name>.h5`` (weights). If loading fails and the
     default name is in use, a fresh model is trained instead.

     Args:
         model_name: Base filename of the saved model (default 'model').
     """
     try:
         # Read the architecture JSON; `with` guarantees the file is
         # closed (the original left the handle open on error).
         with open('model/%s.json' % model_name, 'r') as json_file:
             loaded_model_json = json_file.read()
         loaded_model = model_from_json(loaded_model_json)  # rebuild Keras model
         loaded_model.load_weights("model/%s.h5" % model_name)  # restore weights
         self.model = loaded_model
     except Exception:  # was a bare `except:`; Exception keeps Ctrl-C working
         if model_name == 'model':
             # Default model missing: build and train from scratch.
             print('Model not found.  Training model...')
             self.model = create_model()
             print('Model trained.')
         else:
             # BUG FIX: the original re-opened the JSON file but never
             # read it, never closed it, and called load_weights on a
             # `loaded_model` that was never assigned in this branch
             # (guaranteed NameError). Rebuild the model properly and
             # store it, mirroring the try path.
             with open('model/%s.json' % model_name, 'r') as json_file:
                 loaded_model_json = json_file.read()
             loaded_model = model_from_json(loaded_model_json)
             loaded_model.load_weights("model/%s.h5" % model_name)
             self.model = loaded_model
# Example 3 (score: 0)
def plot_loss(lossList):
    """Plot the raw per-batch training-loss curve and show it."""
    # A spline-smoothed variant was tried and is disabled:
    # xnew = np.linspace(0, len(lossList), len(lossList)*0.5)
    # loss_smooth = spline(np.arange(len(lossList)), lossList, xnew)
    # plt.plot(xnew, loss_smooth, 'g', label='loss_smo0th')
    plt.style.use("ggplot")
    plt.figure()
    batch_axis = range(len(lossList))
    plt.plot(batch_axis, lossList, 'r', label='train loss')  # unsmoothed curve
    plt.grid(True)
    plt.xlabel('batch')
    plt.ylabel('loss')
    plt.legend(loc="upper right")
    plt.show()


# Rebuild the network, restore previously trained weights, and compile
# with plain SGD (small LR + momentum) — presumably for fine-tuning.
model = create_model()
model.load_weights('my_model_weights.h5')
model.compile(optimizer=SGD(lr=0.0001, momentum=0.9))

# Triplet training data stored in HDF5: anchor / positive / negative sets.
# NOTE(review): the file is opened read-only and never closed in this
# chunk — confirm it is closed after training or rely on process exit.
HDF5 = h5py.File('singlesofa_data.h5', 'r')
anchor_data = HDF5['anchor']
positive_data = HDF5['positive']
negative_data = HDF5['negative']
BATCH_SIZE = 32
lossList = []  # collected batch losses; presumably consumed by plot_loss()
EPOCHS = 100
TOTAL_STEPS = len(anchor_data)  # number of triplets in the dataset
# +1 rounds the step count up so a partial final batch is still covered
# (off by one when TOTAL_STEPS divides evenly — TODO confirm intent).
STEPS_PER_EPOCH = int(TOTAL_STEPS / BATCH_SIZE) + 1
# STEPS_PER_EPOCH = 100
for i in range(0, EPOCHS):
    if i != 0: