Example #1
from collections import deque

import numpy as np
import tensorflow as tf

import dqn  # project-specific module providing the DQN class
# `env` (the environment) and `pa` (the parameter container) are module-level
# objects in the original project and are assumed to be in scope here.


def main():
    max_episodes = 50000

    # with tf.Session(config=tf.ConfigProto(device_count={'GPU':0})) as sess:
    with tf.compat.v1.Session() as sess:
        mainDQN = dqn.DQN(sess, pa.network_input_height, pa.network_input_width, pa.network_output_height, name="main")
        # mainDQN = dqn.DQN(sess, pa.horizon * (pa.renewable) + pa.num_queue * (pa.renewable + 1), pa.network_output_height, name="main")
        tf.compat.v1.global_variables_initializer().run()
        trajs = deque()
        trajs_rew = deque()
        for episode in range(max_episodes):
            # Anneal the exploration rate as training progresses.
            e = 1. / ((episode / 1000) + 1)
            done = False
            step_count = 0
            env.reset()
            state = env.observe()
            info = []
            traj = []
            show_result = 0
            while not done:
                # Epsilon-greedy: explore with probability e, else act greedily.
                if np.random.rand() < e:
                    action = env.random_action()
                else:
                    action = np.argmax(mainDQN.predict(state))

                # print("action:"+str(action))
                # env.step(action)
                next_state, reward, done, info = env.step(action)
                traj.append((state, action, reward, next_state, done))

                if done:
                    show_result = reward
                state = next_state
                step_count += 1
                if step_count > 100:
                    break

            trajs.append(traj)
            if episode % 10 == 0:
                print("Episode: {} steps: {}  reward: {}".format(episode, step_count, show_result))
                # print(info)

            # Train on the trajectories collected over the last 10 episodes.
            if episode % 10 == 1 and episode != 1:
                loss, _ = replay_train(mainDQN, trajs)
                print("Loss: ", loss)
                trajs = deque()
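
Example #1 calls replay_train, which is not part of the snippet. A minimal sketch of what it might look like, assuming mainDQN.predict returns a (1, n_actions) array and mainDQN.update(states, targets) performs one gradient step and returns (loss, train_result); both signatures are assumptions, not the original project's code:

def replay_train(dqn, trajs, discount=0.99):
    # Build one-step TD targets from every stored transition, then fit.
    states, targets = [], []
    for traj in trajs:
        for state, action, reward, next_state, done in traj:
            q = dqn.predict(state)
            if done:
                q[0, action] = reward
            else:
                q[0, action] = reward + discount * np.max(dqn.predict(next_state))
            states.append(state)
            targets.append(q[0])
    return dqn.update(np.vstack(states), np.vstack(targets))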
Example #2
def get_decoder(self, layer_of_activations="conv2d_15"):

        # Pop the encoder layers (and the encoded layer) off the old model.
        for _ in self.model.layers[0:19]:
            self.model.layers.pop(0)
        print(self.model.summary())

        # Building a clean model that is the exact same architecture as the decoder part of the autoencoder
        new_model = nb.build_decoder()

        # Looping through both models and setting the weights on the new decoder
        for i, l in enumerate(self.model.layers):
            print(i, l.name, l.output_shape)
            print(new_model.layers[i + 1].name,
                  new_model.layers[i + 1].output_shape)
            new_model.layers[i + 1].set_weights(l.get_weights())
        print(self.model.summary())
        print(new_model.summary(200))
        return new_model
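
A hedged usage sketch for get_decoder; the wrapper instance and the latent input below are illustrative names, not part of the original project:

# Hypothetical usage: split a trained autoencoder wrapper and decode latents.
decoder = wrapper.get_decoder(layer_of_activations="conv2d_15")
decoded = decoder.predict(latent_batch)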
Example #3
import NetworkBuilder

net = NetworkBuilder.NetworkBuilder()

X, y = net.loadData()
y0 = y[:, 0]
y1 = y[:, 1]

mlp_model = net.generateMLP()

mlp_fit_model = net.fitModel(X, [y0, y1], mlp_model)

mlp_model = net.loadWeights(mlp_model, "0001.hdf5")

# examine model:
mlp_model.summary()

mlp_yhat = net.makePredictions(mlp_model, X)

#---------------------------------

lstm_model = net.generateLSTM()

# reshape for LSTM format
X = X.reshape(X.shape[0], 1, X.shape[1])

lstm_fit_model = net.fitModel(X, [y0, y1], lstm_model)

lstm_model = net.loadWeights(lstm_model, "0001.hdf5")
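
The LSTM branch stops after loading weights; by symmetry with the MLP branch above it would presumably finish with the same inspection and prediction calls (an assumed continuation, not shown in the source):

lstm_model.summary()
lstm_yhat = net.makePredictions(lstm_model, X)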
Example #4
def binary_activation(x):
    # Thin wrapper delegating to the project's NetworkBuilder helper; `nb` is
    # assumed to be an instance in scope, as in the other examples.
    return nb.binary_activation(x)
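
nb.binary_activation itself is not shown. One plausible implementation is a hard step function built from TensorFlow ops; a sketch under that assumption (the project's NetworkBuilder may differ, e.g. by adding a straight-through gradient):

import tensorflow as tf

def binary_activation_sketch(x):
    # 1.0 where x > 0, else 0.0. This op has zero gradient almost everywhere,
    # so trainable networks usually pair it with a straight-through estimator.
    return tf.where(x > 0, tf.ones_like(x), tf.zeros_like(x))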
Example #5
import json
import logging

import numpy as np
import pandas as pd
from bokeh.models import ColumnDataSource, PreText
from bokeh.plotting import output_file, show

import NetworkBuilder as nb  # assumed: `nb` is the project's NetworkBuilder helper
output_file("RS_System.html")


template = """<span href="#" data-toggle="tooltip" title="<%= value %>"><%= value %></span>"""
byButton = True
curr = 0

title = PreText(text = "Search Rich Context Archive")
rs_title = PreText(text = "Recommendation Results: ")

df = pd.read_csv('data/ICPSR_ARCHIVE.csv')
df = df.replace(np.nan, '', regex=True)
with open('data/dataset_focals.json') as fl:
    focals = json.load(fl)
G = nb.readNX('data/network_v2.5_contract.json')
all_keywords = [str(i) for i in G.nodes
                if not str(i).startswith('data_')
                and not str(i).startswith('pub_')
                and not str(i).startswith('auth_')]
output_file('ICPSR_Archive.html',mode='inline',root_dir=None)

source = ColumnDataSource(data=dict())
rs_source = ColumnDataSource(data=dict())


def update():
    # `menu`, `nmenu`, and `search` are bokeh widgets defined elsewhere in
    # the original app.
    t = menu[nmenu.active][1]
    logging.info(t)
    current = df.sample(10)
    if search.value != "":
        if t != "":
            current = df.loc[df.ID.str.startswith(t)
                             & df.title.str.contains(search.value, case=False)]
        else:
            # Assumed completion (the source snippet is truncated here): the
            # same title search without the ID-prefix filter.
            current = df.loc[df.title.str.contains(search.value, case=False)]
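    # NOTE: the remainder of update() is missing from the source snippet. A
    # typical bokeh ending would publish the filtered rows to the bound
    # ColumnDataSource, roughly (assumed, using columns referenced above):
    source.data = {col: current[col] for col in ('ID', 'title')}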
Example #6
def getRecommendations(Search_By='Keyword',
                       search='',
                       metric='Jaccard',
                       G=None):
    all_res = pd.DataFrame()
    nodes = []
    if search == '':
        print('MISSING ARGUMENT: Specify Search Value!')
        return all_res
    elif G is None:
        G = nb.buildG()
        nb.addCommunity(G)

    allDatasets = [i for i in G.nodes if str(i).startswith('data_')]
    allPubs = [i for i in G.nodes if str(i).startswith('pub_')]
    all_keywords = [
        str(i) for i in G.nodes
        if not str(i).startswith('data_')
        and not str(i).startswith('pub_')
        and not str(i).startswith('auth_')
    ]
    # `datasets` and `publications` are module-level DataFrames in the
    # original project (assumed to be in scope here).
    all_dataTitles = dict(
        zip(datasets.title_lower,
            ['data_' + str(i) for i in datasets.data_set_id]))
    all_pubTitles = dict(
        zip(publications.title_lower,
            ['pub_' + str(i) for i in publications.publication_id]))

    if Search_By == 'Keyword':
        match = difflib.get_close_matches(search, all_keywords)
        if len(match) > 0:
            nodes = [
                i for i in G.neighbors(match[0]) if str(i).startswith('data_')
            ] if len(match) > 0 else []
            all_res = pd.DataFrame(
                [int(i.replace('data_', '')) for i in nodes],
                columns=['data_set_id'])
            all_res['score'] = 1.0
            nodes.extend([
                i for i in G.neighbors(match[0]) if str(i).startswith('pub_')
            ])
    elif Search_By == 'Dataset':
        match = difflib.get_close_matches(
            search,
            list(all_dataTitles.keys()) + list(all_dataTitles.values()))
        if len(match) > 0:
            nodes = [match[0]] if '_' in match[0] else [all_dataTitles[match[0]]]
    elif Search_By == 'Publication Paper':
        match = difflib.get_close_matches(
            search,
            list(all_pubTitles.keys()) + list(all_pubTitles.values()))
        if len(match) > 0:
            nodes = [match[0]] if '_' in match[0] else [all_pubTitles[match[0]]]
    else:
        print('MISSING ARGUMENT: Specify Search Criteria!')
        return all_res


    if all_res.shape[0] < 10:
        for n in nodes:
            res = getNodeSim(n, G, metric)
            # Skip empty per-node results so earlier matches are preserved.
            if res.shape[0] > 0:
                all_res = pd.concat([all_res, res], sort=False)
        all_res = all_res.reset_index(drop=True)

    if all_res.shape[0] > 0:
        all_res = pd.merge(all_res,
                           datasets[['data_set_id', 'title', 'description']],
                           how='left',
                           on='data_set_id')
        all_res = all_res.sort_values(by='score',
                                      ascending=False).reset_index(drop=True)
        return all_res.iloc[:10]
    else:
        return all_res
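
getRecommendations delegates per-node scoring to getNodeSim, which is not included in the snippet. A minimal stand-in, assuming Jaccard similarity over shared graph neighbors and the 'data_' id convention used above (the real helper likely supports other metrics and weighting):

def getNodeSim(node, G, metric='Jaccard'):
    # Hypothetical sketch: score each dataset node by the Jaccard overlap of
    # its neighborhood with the query node's neighborhood.
    base = set(G.neighbors(node))
    rows = []
    for d in (i for i in G.nodes if str(i).startswith('data_') and i != node):
        other = set(G.neighbors(d))
        union = base | other
        if union:
            rows.append({'data_set_id': int(str(d).replace('data_', '')),
                         'score': len(base & other) / len(union)})
    return pd.DataFrame(rows, columns=['data_set_id', 'score'])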
Example #7
import tensorflow as tf

import NetworkBuilder  # project-specific module, as in the other examples

## input (the snippet's opening line is truncated in the source; the
## placeholder shape is assumed, and `num_features` / `num_classes` are
## module-level constants from the original script)
x = tf.placeholder(tf.float32, shape=[None, num_features], name='x')

## labels
y_true = tf.placeholder(tf.float32, shape=[None, num_classes], name='y_true')
y_true_cls = tf.argmax(y_true, axis=1)


def create_weights(shape):
    return tf.Variable(tf.truncated_normal(shape, stddev=0.05))


def create_biases(size):
    return tf.Variable(tf.constant(0.05, shape=[size]))


nb = NetworkBuilder.NetworkBuilder()

with tf.name_scope("ModelV2") as scope:
    model = x
    model = nb.flatten(model)
    model = nb.attach_relu_layer(model)
    model = nb.attach_sigmoid_layer(model)
    model = nb.attach_dense_layer(model, 3)

y_pred = tf.nn.softmax(model, name='y_pred')

y_pred_cls = tf.argmax(y_pred, axis=1)
session = tf.Session()
session.run(tf.global_variables_initializer())
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=model,
                                                        labels=y_true)
cost = tf.reduce_mean(cross_entropy)
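
The snippet ends at the cost; training would require attaching an optimizer and running steps. An assumed continuation (optimizer choice, learning rate, and batch names are illustrative):

optimizer = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(cost)
# Adam creates its own slot variables, so re-run the initializer afterwards.
session.run(tf.global_variables_initializer())
# One training step; batch_x / batch_y stand in for real mini-batches:
# _, batch_cost = session.run([optimizer, cost],
#                             feed_dict={x: batch_x, y_true: batch_y})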