Example #1
def uploaded_data(content=None):
    # Do nothing until a file has actually been uploaded.
    if content is None:
        raise dash.exceptions.PreventUpdate
    df = utils.parse_data(content)
    freq_plot, time_plot = utils.generate_plot(df)
    return freq_plot, time_plot
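A minimal wiring sketch for a callback like this, assuming Dash 2.x and that the function and `utils` above are in scope; the component IDs ('upload-data', 'freq-plot', 'time-plot') and the layout are assumptions, not taken from the original example:

import dash
from dash import dcc, html
from dash.dependencies import Input, Output

app = dash.Dash(__name__)
app.layout = html.Div([
    dcc.Upload(id='upload-data'),   # hypothetical upload component
    dcc.Graph(id='freq-plot'),
    dcc.Graph(id='time-plot'),
])

# Register uploaded_data() as a callback: it fires whenever the upload
# component's 'contents' property changes and fills both graphs.
app.callback(
    [Output('freq-plot', 'figure'), Output('time-plot', 'figure')],
    [Input('upload-data', 'contents')],
)(uploaded_data)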
Example #2
def update_graph(selected_graph, selected_channel, selected_centrality,
                 selectedCommunity, selectedData):
    # Identify which input triggered this callback.
    ctx = dash.callback_context
    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
    print(button_id)
    graph_label = selected_graph.rsplit('/', 1)[-1]
    if button_id == 'graphs':
        # A different graph was selected: reload its nodes from Neo4j.
        global df_neo4j
        query = "MATCH (nodes:{}) RETURN nodes".format(graph_label.lower())
        graph = py2neo.Graph(bolt=True,
                             host='localhost',
                             user='******',
                             password='******')
        df_neo4j = graph.run(query).to_data_frame()
    persons = []
    # Build the temporal plot and the node-link diagram for the selected graph.
    tempo_fig = generate_plot(selected_graph)
    nodelink_fig = neograph(selected_graph, selected_channel,
                            selected_centrality, selectedCommunity)
    if button_id == 'transaction_plot':
        # A date range was brushed on the temporal plot: filter the node-link
        # view to that range and leave the temporal plot unchanged.
        print('Filter operation')
        start_date = selectedData['range']['x'][0]
        end_date = selectedData['range']['x'][1]
        nodelink_fig = neograph_filtered(selected_graph, selected_channel,
                                         selected_centrality,
                                         selectedCommunity, start_date,
                                         end_date)
        return dash.no_update, nodelink_fig

    # Property names on the Neo4j nodes used as sort keys.
    centrality = selected_channel[:-1].lower() + selected_centrality
    community = selected_channel[:-1].lower() + selectedCommunity
    if selected_centrality != 'None' and selectedCommunity != 'None':
        persons = df_neo4j.nodes.apply(pd.Series).sort_values(
            [community, centrality]).pid
    elif selectedCommunity != 'None':
        persons = df_neo4j.nodes.apply(pd.Series).sort_values([community]).pid
    elif selected_centrality != 'None':
        persons = df_neo4j.nodes.apply(pd.Series).sort_values([centrality]).pid
    # Reorder the temporal plot's y-axis to follow the computed ranking.
    tempo_fig.update_layout(yaxis=dict(categoryarray=persons))
    return tempo_fig, nodelink_fig
Example #3

for epoch in range(num_epochs):
    for batch_i, (real_images, _) in enumerate(train_loader):
        batch_size = real_images.size(0)

        # transform real image data from [0, 1) to [-1, 1)
        real_images = real_images * 2 - 1

        d_loss = train_discriminator(real_images, d_optim, batch_size, z_size)
        g_loss = train_generator(g_optim, batch_size, z_size)

        # Print some loss stats
        if batch_i % print_every == 0:
            # print discriminator and generator loss
            print('Epoch [{:5d}/{:5d}] | d_loss: {:6.4f} | g_loss: {:6.4f}'.
                  format(epoch + 1, num_epochs, d_loss.item(), g_loss.item()))

    # Record the losses from the final batch of each epoch for plotting.
    losses.append((d_loss.item(), g_loss.item()))

    # generate and save sample, fake images
    G.eval()  # eval mode for generating samples
    samples_z = G(fixed_z)
    samples.append(samples_z)
    G.train()  # back to train mode

with open('train_samples.pkl', 'wb') as f:
    pkl.dump(samples, f)

generate_plot(losses)
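The generate_plot helper itself is not shown in this snippet; a plausible minimal version, assuming losses is the per-epoch list of (d_loss, g_loss) tuples collected above, might look like this:

import matplotlib.pyplot as plt

def generate_plot(losses):
    # losses: one (discriminator_loss, generator_loss) tuple per epoch
    d_losses, g_losses = zip(*losses)
    fig, ax = plt.subplots(figsize=(8, 5))
    ax.plot(d_losses, label='Discriminator')
    ax.plot(g_losses, label='Generator')
    ax.set_xlabel('Epoch')
    ax.set_ylabel('Loss')
    ax.legend()
    fig.savefig('training_losses.png')
    plt.close(fig)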
Example #4
startTime = time.time()

#1. Initialize Tree and growth
print("Initializing Fixed Node Tree.....")
tree = Tree(start, goal, obstacles, xmin, ymin, xmax, ymax, maxNumNodes,
            resolution, eta, gamma, epsilon)

#2. Set pcurID = 0; by default in Tree instantiation

#3. Get Solution Path
solPath, solPathID = tree.initGrowth(exhaust=True, FN=True)

####################
# Plot
if plot_and_save_gif:
    im = utils.generate_plot(tree, solPath)
    # Appending to list of images
    images.append(im)
####################

#4. Init movement()-->> update pcurID
solPath, solPathID, dt = tree.nextSolNode(solPath, solPathID)

#5. Begin replanning loop, while pcur is not goal, do...
while np.linalg.norm(tree.nodes[tree.pcurID, 0:2] - tree.goal) > tree.epsilon:
    if plot_and_save_gif:
        im = utils.generate_plot(tree, solPath)
        # Appending to list of images
        images.append(im)

    #6. Obstacle Updates
Example #5
raw_path = 'F:/onNotOn_raw/zy_onNoton_raw.p'

# utility directory to save the pyplots
radar_3dscatter_path = '/Users/hanfei/figures/plots'

radar_data = list(pickle.load(open(radarData_path, 'rb')).items())
radar_data.sort(key=lambda x: x[0])  # sort by timestamp
videoData_list = os.listdir(videoData_path)
videoData_timestamps = list(
    map(lambda x: float(x.strip('.jpg')), videoData_list))

style.use('fivethirtyeight')
white_color = 'rgb(255, 255, 255)'
black_color = 'rgb(0, 0, 0)'
red_color = 'rgb(255, 0, 0)'

DBSCAN_esp = 0.2
DBSCAN_minSamples = 3

# input data for the classifier, shaped (n, num_padding, 4) with n the number of samples
num_padding = 50
data_for_classifier = np.zeros((len(radar_data), num_padding, 4))
data_for_classifier_flattened = np.zeros(
    (len(radar_data), 1, 4 * num_padding + 1))

fnt = ImageFont.truetype("Arial.ttf", 16)

generate_plot(radar_data, videoData_timestamps, videoData_path, DBSCAN_esp,
              DBSCAN_minSamples, num_padding, fnt, radar_3dscatter_path,
              mergedImg_path)
Example #6
File: analyze.py  Project: mcai/heo
            for reinforcement_factor in reinforcement_factor_range:
                results.append(
                    parse_result(working_directory(traffic, '', max_cycles,
                                                   num_nodes, 'OddEven', 'ACO',
                                                   data_packet_injection_rate,
                                                   aco_selection_alpha,
                                                   reinforcement_factor),
                                 bench=traffic))

    csv_file_name = 'results/synthesized_general_' + str(
        data_packet_injection_rate) + '.csv'

    results_to_csv(results, csv_file_name)

    generate_plot(
        csv_file_name, 'results/synthesized_simulation_time_' +
        str(data_packet_injection_rate), 'Benchmark',
        'Simulation Time (Seconds)', 'Algorithm', 'Simulation Time (Seconds)')

    generate_plot(
        csv_file_name,
        'results/synthesized_throughput_' + str(data_packet_injection_rate),
        'Benchmark', 'Throughput', 'Algorithm', 'Throughput')

    generate_plot(
        csv_file_name, 'results/synthesized_average_packet_delay_' +
        str(data_packet_injection_rate), 'Benchmark', 'Average Packet Delay',
        'Algorithm', 'Average Packet Delay')

    generate_plot(
        csv_file_name, 'results/synthesized_payload_throughput_' +
        str(data_packet_injection_rate), 'Benchmark', 'Payload Throughput',
Example #7
    parser = ArgumentParser(formatter_class=RawTextHelpFormatter)

    parser.add_argument("file",
                        metavar="file",
                        type=str,
                        help="Google takeout JSON file.")
    parser.add_argument("-s",
                        "--size",
                        dest="size",
                        type=int,
                        required=False,
                        help="Number of top sites to be displayed.",
                        default=20)
    parser.add_argument("-d",
                        "--days",
                        dest="days",
                        type=int,
                        required=False,
                        help="Number of x last days of data to be displayed.",
                        default=60)

    args = parser.parse_args()

    print("(1/2): Processing data")
    data = []
    data.append(chart_json(args.file, args.days))
    data.append(hist_json(args.file, args.days))

    print("(2/2): Generating graph")
    generate_plot(data, args.size, args.days)
Example #8
def train(data, labels, model):
    history = model.fit(data, labels, epochs=150, batch_size=10)
    generate_plot(history, 'loss')
    model.save(MODEL_PATH)
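A minimal usage sketch, assuming MODEL_PATH and generate_plot are defined elsewhere in the module; the model architecture and the random data below are purely illustrative:

import numpy as np
from tensorflow import keras

# Illustrative data and model only.
X = np.random.rand(200, 8)
y = np.random.randint(0, 2, size=(200,))

model = keras.Sequential([
    keras.layers.Dense(16, activation='relu', input_shape=(8,)),
    keras.layers.Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Fits for 150 epochs, plots the loss curve, and saves the model to MODEL_PATH.
train(X, y, model)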