# architect.py -- executable file, 584 lines (573 loc), 26.5 KB
# (web-scraper line-number gutter removed; code begins below)
#!/usr/bin/env python
# Redistribution and use in any form is permitted
# by its original author Yuchen Hou. Enjoy!
from collections import OrderedDict
from multiprocessing import Pool
from shutil import copyfile
from ast import literal_eval
from os import devnull
from os import rename
from pprint import pprint
from shutil import copy
from cProfile import run
from fileinput import input
from itertools import combinations
from functools import reduce
from operator import mul
import random
from random import uniform
from shlex import split
from time import strftime
from datetime import datetime
from subprocess import call
from subprocess import check_call
from simpleai.search import SearchProblem
from simpleai.search.local import hill_climbing
from simpleai.search.local import hill_climbing_stochastic
from simpleai.search.local import hill_climbing_random_restarts
from simpleai.search.local import beam
from simpleai.search.local import beam_best_first
from simpleai.search.local import simulated_annealing
from sklearn.svm import SVR
from sklearn.svm import SVC
from sklearn.grid_search import GridSearchCV
from sklearn.preprocessing import scale
from sklearn.preprocessing import StandardScaler
from networkx import Graph
from networkx import DiGraph
from networkx import relabel_nodes
from networkx import nodes
from networkx import get_node_attributes
from networkx import get_edge_attributes
from networkx import neighbors
from networkx import is_connected
from networkx import is_strongly_connected
from networkx import diameter
from networkx import radius
from networkx import degree
from networkx import density
from networkx import draw
from networkx import draw_networkx_edge_labels
from networkx import complete_graph
from networkx import gnm_random_graph
from networkx import grid_2d_graph
from networkx import connected_watts_strogatz_graph
from networkx import navigable_small_world_graph
from networkx import to_numpy_matrix
from networkx import from_numpy_matrix
from networkx import to_dict_of_dicts
from networkx import to_edgelist
from networkx import shortest_path
from networkx import shortest_path_length
from networkx import average_shortest_path_length
from networkx import average_clustering
import numpy
from numpy import loadtxt
from numpy import savetxt
from numpy import delete
from numpy import arange
from numpy import asarray
from numpy import histogram
from numpy import zeros
from numpy import array
from numpy import full
from numpy import empty
from numpy import average
from numpy import repeat
from numpy import prod
from numpy import fill_diagonal
from numpy import vstack
from numpy import hstack
from numpy import tile
from numpy import hsplit
from numpy import vsplit
from numpy import logspace
from numpy import linspace
from numpy import squeeze
from numpy.linalg import norm
from numpy.random import rand
from numpy.random import randn
from scipy.spatial.distance import cityblock
from scipy.spatial.distance import euclidean
from scipy.stats import rv_discrete
import pandas
from pandas import read_csv
from pandas import DataFrame
from pandas import Series
from pandas import concat
from matplotlib import use
use('Agg')
from matplotlib.pyplot import figure
from matplotlib.pyplot import title
from matplotlib.pyplot import savefig
class Critic(object):
    """Offline model-selection helper: grid-searches SVR kernels on a dataset.

    NOTE(review): this class looks stale -- `actuator` is not defined anywhere
    in this file (only `performer` is), and Performer has no SAMPLE_COUNT
    attribute.  Calling evaluate_kernels() would raise NameError as written.
    """
    def evaluate_kernels(self, dataset):
        """Report GridSearchCV best params/score for each candidate SVR kernel."""
        # NOTE(review): `actuator` / SAMPLE_COUNT undefined -- see class docstring.
        data = actuator.load_data(dataset, range(performer.SAMPLE_COUNT))
        kernels = ['linear', 'poly', 'rbf', 'sigmoid']
        for kernel in kernels:
            svr = SVR(kernel)
            parameters = {'C':logspace(0, 2, 3).tolist()}
            # Kernel-specific hyperparameter grids.
            if kernel == 'poly':
                parameters['degree'] = linspace(1, 4, 4, dtype = int).tolist()
            if kernel == 'rbf':
                parameters['gamma'] = logspace(-4, 0, 5).tolist()
            estimator = GridSearchCV(svr, parameters, cv = 10, n_jobs = -1)
            # data[2] is the feature block, data[1] a target column; the last
            # 10 rows are held out (presumably for eyeball testing -- verify).
            estimator.fit(data[2][:-10], data[1][:-10])
            print 'kernel=', kernel,
            print 'best_params=', estimator.best_params_,
            print 'best_score=', estimator.best_score_
        return
class Performer(object):
    """Core engine of the topology-design flow.

    Builds network-on-chip topology graphs (mesh / random / small-world),
    writes them to the external simulator's input format, scrapes latency and
    energy back out of the simulation log, extracts graph features, and
    trains/uses SVR estimators of the targets for the search in Optimization.

    NOTE(review): the source reached review with all leading indentation
    stripped; the block structure below was reconstructed from syntax and
    should be checked against the original file where ambiguous.
    """
    # External simulator binary and the estimator-accuracy log file.
    SIMULATOR = 'simulator.out'
    ACCURACY = 'accuracy.tsv'
    # Fixed per-hop router cost added to each link's geometric length.
    NODE_WEIGHT = 3
    # Nodes live on a DIMENSION-dimensional grid of side RADIX.
    DIMENSION = 2
    # Maximum router degree accepted by constraints_satisfied().
    DEGREE_MAX = 7
    ARCHITECTURES = ['mesh', 'random', 'small_world', 'optimum']
    BENCHMARKS = ['fft', 'lu', 'radix', 'water', 'canneal', 'dedup', 'fluidanimate', 'vips']
    # Simulation targets and the log-line prefixes used to scrape them.
    TARGETS = ['latency', 'energy']
    TARGET_TOKENS = ['Avg. Network Latency:', 'Average energy per message done:']
    # Feature columns, in the exact order produced by extract_features().
    FEATURES = ['average_path_length',
                'weighted_average_path_length',
                'average_link_length',
                'max_link_length',
                'min_link_length',
                'average_degree',
                'max_degree',
                'min_degree',
                'edge_count',
                'average_clustering',
                'small_world_ness',
                'alpha',
                'beta',
                'total_traffic']
    ATTRIBUTES = TARGETS + FEATURES
    NORMALIZED_ATTRIBUTES = ['normalized_' + a for a in ATTRIBUTES]
    METADATA = ['time', 'architecture', 'benchmark', 'optimization_target', 'topology']
    def extract_features(self, graph):
        """Return the raw feature vector for `graph`, in FEATURES order.

        Uses networkx 1.x semantics: graph.degree() returns a dict.
        """
#        print 'performer.extract_features:'
        raw_features = [average_shortest_path_length(graph, 'weight'),
                        self.weighted_average_path_length(self.TRAFFIC, graph),
                        average(get_edge_attributes(graph, 'length').values()),
                        max(get_edge_attributes(graph, 'length').values()),
                        min(get_edge_attributes(graph, 'length').values()),
                        average(graph.degree().values()),
                        max(graph.degree().values()),
                        min(graph.degree().values()),
                        graph.number_of_edges(),
                        average_clustering(graph),
                        # 'small_world_ness': clustering over path length.
                        average_clustering(graph) / average_shortest_path_length(graph, 'weight'),
                        self.ALPHA,
                        self.BETA,
                        self.TOTAL_TRAFFIC]
        return raw_features
    def position(self, node_index):
        """Map a linear node index to its (row, column) on the RADIX grid.

        Relies on Python 2 integer division for the row coordinate.
        """
        node_position = (node_index / self.RADIX, node_index % self.RADIX)
        return node_position
    def link_length(self, source, destination):
        # Euclidean grid distance, truncated to an integer.
        return int(euclidean(self.position(source), self.position(destination)))
    def edge_weight(self, source, destination):
        # Link cost = wire length plus the fixed router cost.
        return self.link_length(source, destination) + self.NODE_WEIGHT
    def edge_index(self, source, destination):
        # Linearize a (source, destination) pair; inverse of edge_nodes().
        index = source * self.NODE_COUNT + destination
        return index
    def edge_nodes(self, index):
        # Inverse of edge_index(); Python 2 integer division.
        nodes = (index / self.NODE_COUNT, index % self.NODE_COUNT)
        return nodes
    def file_name(self, quantity, index):
        """Build an output-file stem for a known quantity, else raise NameError."""
        quantities = ['topology_view', 'link_lengths', 'average_hop_count']
        quantities += self.ATTRIBUTES + self.NORMALIZED_ATTRIBUTES
        if quantity in quantities:
            name = quantity + '_' + str(index)
        else:
            raise NameError('no file for quantity: ' + quantity)
        return name
    def initialize(self, objective, architecture, radix, edge_count):
        """One-time setup: dataset paths plus the all-pairs distance and
        edge-index tables later used for small-world edge sampling."""
        self.ARCHITECTURE = architecture
        self.OBJECTIVE = objective
        self.DATASET_TRAINING = 'dataset_optimum_design.tsv'
        self.DATASET_DESIGN = 'dataset_' + architecture + '_design.tsv'
        self.DATASET_TEST = 'dataset_' + architecture + '_test.tsv'
        self.RADIX = radix
        self.NODE_COUNT = self.RADIX ** self.DIMENSION
        self.EDGE_COUNT = edge_count
        self.distances = zeros((self.NODE_COUNT, self.NODE_COUNT))
        self.edge_indices = zeros((self.NODE_COUNT, self.NODE_COUNT), int)
        for node1 in range(self.NODE_COUNT):
            for node2 in range(self.NODE_COUNT):
                self.distances[node1][node2] = self.edge_weight(node1, node2)
                self.edge_indices[node1][node2] = self.edge_index(node1, node2)
        return
    def reinitialize(self, thread_id, benchmark, optimization_target, alpha, beta):
        """Per-run setup: thread-specific file names, the traffic matrix, and
        the edge-sampling probabilities parameterized by alpha and beta."""
        if (optimization_target not in self.TARGETS):
            raise NameError('unknown optimization_target: ' + optimization_target)
        self.BENCHMARK = benchmark
        self.THREAD_ID = thread_id
        self.TOPOLOGY = 'topology_' + str(thread_id) +'.tsv'
        self.SIMULATION_LOG = 'simulation_' + str(thread_id) + '.log'
        self.OPTIMIZATION_TARGET = optimization_target
        self.TRAFFIC_FILE = 'traffic_' + benchmark + '.tsv'
        if self.RADIX == 8:
            # Recorded benchmark traffic exists only for the 8x8 configuration.
            raw_traffic = loadtxt(self.TRAFFIC_FILE)
        else:
            # No recorded traffic at other radices: fall back to random traffic.
            raw_traffic = rand(self.NODE_COUNT,self.NODE_COUNT)
            # NOTE(review): indentation reconstructed -- symmetrization assumed
            # to apply only to the random-traffic case; confirm against original.
            raw_traffic += raw_traffic.transpose()
        self.TOTAL_TRAFFIC = raw_traffic.sum().sum()
        self.TRAFFIC = raw_traffic
        self.ALPHA = alpha
        self.BETA = beta
        # Edge probability ~ distance^-alpha * traffic^beta, self-loops zeroed.
        super_distances = self.distances ** (- alpha)
        fill_diagonal(super_distances, 0)
        raw_probabilities = (super_distances) * (raw_traffic ** beta)
        self.probabilities = raw_probabilities / (raw_probabilities.sum().sum())
        print 'performer.initialize:', self.BENCHMARK, self.NODE_COUNT, self.EDGE_COUNT, self.TOTAL_TRAFFIC
        return
    def combine_traffic(self):
        """Average the per-benchmark traffic matrices into traffic_combined.tsv."""
        combined_traffic = zeros((self.NODE_COUNT, self.NODE_COUNT))
        for benchmark in self.BENCHMARKS:
            traffic_file = 'traffic_' + benchmark + '.tsv'
            raw_traffic = loadtxt(traffic_file)
            # Normalize each benchmark before summing so all weigh equally.
            combined_traffic += (raw_traffic / raw_traffic.sum().sum())
        # Rescales the combined matrix to sum to 0.1 (kept as in original).
        combined_traffic /= combined_traffic.sum().sum()*10
        print combined_traffic.sum().sum()
        savetxt('traffic_combined.tsv', combined_traffic, fmt='%f', delimiter='\t')
        return
    def update_estimators(self, accuracy):
        """Grid-search one RBF SVR per target on the training dataset.

        `accuracy` sets the size of both the C and gamma search grids.
        Appends the chosen parameters and CV scores to the ACCURACY log and
        stores the fitted estimators on self for estimate_metrics().
        """
        data = self.load_data(self.DATASET_TRAINING, range(len(self.ATTRIBUTES)))
        c_range = accuracy
        gamma_range = accuracy
        parameters = {'C' : logspace(0, c_range, c_range+1).tolist(),
                      'gamma' : logspace(- gamma_range, 0, gamma_range+1).tolist()}
        estimators = []
        svrs = []
        data_instance = [datetime.now(), self.BENCHMARK]
        for i in range(len(self.TARGETS)):
            svrs.append(SVR('rbf'))
            estimators.append(GridSearchCV(svrs[i], parameters, n_jobs = -1))
            # data[len(TARGETS)] is the feature block, data[i] the i-th target.
            estimators[i].fit(data[len(self.TARGETS)], data[i])
            data_instance += [estimators[i].best_params_, estimators[i].best_score_]
        print 'performer.update_estimators: benchmark =', self.BENCHMARK+';', data_instance
        with open(self.ACCURACY, 'a') as f:
            f.write('\t'.join(map(str, data_instance)) + '\n')
        self.estimators = estimators
        return
    def center(self, graph, source, destination):
        """Return the midpoint of the two nodes' grid positions."""
        center = [source, destination]
        for i in range(self.DIMENSION):
            center[i] = .5*(graph.node[source]['position'][i] + graph.node[destination]['position'][i])
        return center
    def constraints_satisfied(self, graph):
        """True iff `graph` respects the edge budget, degree cap, and is connected."""
        degree_max = max(graph.degree().values())
#        print 'performer.constraints_satisfied:', degree_max, graph.number_of_edges(), is_connected(graph)
        if graph.number_of_edges() <= self.EDGE_COUNT and degree_max <= self.DEGREE_MAX and is_connected(graph):
            return True
        else:
            return False
    def process_graph(self, graph):
        """Drop self-loops and annotate nodes with positions, edges with
        length/weight (networkx 1.x attribute-dict mutation)."""
        graph.remove_edges_from(graph.selfloop_edges())
        for node_key, node_attributes in graph.nodes(data=True):
            node_attributes['position'] = self.position(node_key)
        for source, destination, edge_attributes in graph.edges(data=True):
            edge_attributes['length'] = self.link_length(source, destination)
            edge_attributes['weight'] = self.edge_weight(source, destination)
        return
    def key_mapping(self, tuple_key):
        # (row, col) grid key -> linear node index.
        new_key = tuple_key[0] * self.RADIX + tuple_key[1]
        return new_key
    def generate_grid_graph(self):
        """Build the baseline RADIX x RADIX mesh with linear node labels."""
        print 'performer.generate_grid_graph:', self.NODE_COUNT, self.EDGE_COUNT
        tuple_keyed_graph = grid_2d_graph(self.RADIX, self.RADIX)
        graph = relabel_nodes(tuple_keyed_graph, self.key_mapping)
        self.process_graph(graph)
        return graph
    def generate_random_graph(self):
        """Rejection-sample G(n, m) graphs until one satisfies the constraints."""
        print 'performer.generate_random_graph:', self.NODE_COUNT, self.EDGE_COUNT
        while True:
            graph = gnm_random_graph(self.NODE_COUNT, self.EDGE_COUNT)
            if self.constraints_satisfied(graph):
                self.process_graph(graph)
                return graph
    def generate_small_world_graph(self):
        """Grow a graph by sampling edges with probability ~ distance^-alpha *
        traffic^beta (set up in reinitialize), repairing degree and edge-count
        violations as it goes; return the first constraint-satisfying graph.
        """
        max_edges = self.NODE_COUNT*(self.NODE_COUNT-1)/2
        if self.EDGE_COUNT > max_edges:
            return complete_graph(self.NODE_COUNT)
        graph = Graph()
        graph.add_nodes_from(range(self.NODE_COUNT))
        # Flattened tables from initialize(); edge_index doubles as the
        # position of that edge in both flat arrays.
        edges = performer.edge_indices.flatten()
        probabilities = performer.probabilities.flatten()
        for trial in range(len(edges)-9):
            edge_index = numpy.random.choice(edges, p=probabilities)
            source, destination = self.edge_nodes(edge_index)
            graph.add_edge(source, destination, length = self.link_length(source, destination),
                           weight = self.edge_weight(source, destination))
            # Remove the sampled edge from the pool and renormalize.
            probabilities[edge_index] = 0
            probabilities /= sum(probabilities)
            # Undo the addition if it broke the degree cap.
            if max(graph.degree().values()) > self.DEGREE_MAX:
                graph.remove_edge(source, destination)
            # Evict a random edge if over the edge budget.
            if graph.number_of_edges() > self.EDGE_COUNT:
                victim = random.choice(graph.edges())
                graph.remove_edge(victim[0], victim[1])
            if self.constraints_satisfied(graph):
                print 'performer.generate_small_world_graph:',
                print self.BENCHMARK, self.NODE_COUNT, self.EDGE_COUNT, trial
                self.process_graph(graph)
                return graph
    def weighted_average_path_length(self, traffic, graph):
        """All-pairs shortest-path length averaged with `traffic` as weights."""
#        print 'performer.weighted_average_path_length:', self.BENCHMARK, self.NODE_COUNT, self.EDGE_COUNT
        # networkx 1.x: dict-of-dicts {source: {destination: length}}.
        raw_path_lengths = shortest_path_length(graph, weight = 'weight')
        path_lengths = zeros((self.NODE_COUNT, self.NODE_COUNT))
        for source in raw_path_lengths:
            for destination in raw_path_lengths[source]:
                path_lengths[source][destination] = raw_path_lengths[source][destination]
        weighted_average = average(path_lengths, weights = traffic)
        return weighted_average
    def load_data(self, dataset, columns):
        """Load, standardize, and split a dataset into [target..., features].

        Stores the fitted scaler on self for estimate_metrics().  Python 2
        `map` returns a list here.
        """
        raw_dataset = loadtxt(dataset, usecols = columns, skiprows = 1)
        self.scaler = StandardScaler()
        self.scaler.fit(raw_dataset)
        scaled_dataset = self.scaler.transform(raw_dataset)
        # One squeezed column per target, then the remaining feature block.
        split_dataset = map(squeeze, hsplit(scaled_dataset, range(1,len(self.TARGETS)+1)))
        return split_dataset
    def estimate_metrics(self, raw_features):
        """Predict raw target metrics for a raw feature vector, round-tripping
        through the scaler fitted in load_data()."""
        # Placeholder values fill the target slots so the scaler sees a full
        # row; Python 2 range() returns a list, so `+` concatenates.
        raw_sample = asarray(range(len(self.TARGETS)) + raw_features)
        scaled_sample = self.scaler.transform(raw_sample)
        for i in range(len(self.TARGETS)):
            scaled_sample[i] = (self.estimators[i].predict(scaled_sample[len(self.TARGETS):])).tolist()[0]
        estimated_raw_sample = self.scaler.inverse_transform(asarray(scaled_sample)).tolist()
        estimated_metrics = estimated_raw_sample[:len(self.TARGETS)]
        return estimated_metrics
    def evaluate_quality(self, raw_targets):
        """Scalar quality for the maximizing optimizer: negated target metric."""
        if self.OPTIMIZATION_TARGET == 'latency':
            return -raw_targets[0]
        elif self.OPTIMIZATION_TARGET == 'energy':
            return -raw_targets[1]
        else:
            raise NameError('unknown optimization_target')
    def extract_targets(self):
        """Scrape latency/energy out of the simulation log.

        Returns the metrics list, or None if any token was missing (treated
        as simulation failure by callers).
        """
        metrics = [None] * len(performer.TARGETS)
        with open(self.SIMULATION_LOG, 'r') as f:
            for line in f:
                for index in range(len(performer.TARGET_TOKENS)):
                    if line.startswith(performer.TARGET_TOKENS[index]):
                        metrics[index] = float(line.replace(performer.TARGET_TOKENS[index], ''))
        for metric in metrics:
            if metric == None:
                print 'performer.extract_targets: targets not found; simulation failed'
                return None
        return metrics
    def evaluate_metrics(self, graph):
        """Write the topology file, run the external simulator, and return the
        scraped metrics (None on failure)."""
        self.configure_topology(self.TOPOLOGY, graph)
        with open(self.SIMULATION_LOG, 'w+') as f:
            call([self.SIMULATOR, self.BENCHMARK, self.TOPOLOGY], stdout = f)
        metrics = self.extract_targets()
#        print 'performer.evaluate_metrics:', self.BENCHMARK, self.NODE_COUNT, self.EDGE_COUNT, metrics
        return metrics
    def string_to_graph(self, graph_string):
        """Rebuild an annotated graph from its dict-of-dicts repr string."""
#        print 'performer: string_to_graph: graph_string =', graph_string
        graph = Graph(literal_eval(graph_string))
        self.process_graph(graph)
        return graph
    def update_database(self, dataset, architecture, graph):
        """Simulate `graph` and append a [metrics, features, metadata] row to
        `dataset`; silently skips on simulation failure."""
        metrics = performer.evaluate_metrics(graph)
        if metrics == None:
            return
        print 'performer.update_database:', self.BENCHMARK, architecture, metrics
        metadata = [datetime.now(), architecture, self.BENCHMARK, self.OPTIMIZATION_TARGET, to_dict_of_dicts(graph)]
        design_instance = metrics + self.extract_features(graph) + metadata
        with open(dataset, 'a') as f:
            f.write('\t'.join(map(str, design_instance)) + '\n')
        return
    def configure_topology(self, topology_file, graph):
        """Write the simulator topology file: a 2Nx2N block matrix with -1 for
        'no link', the adjacency weights in the lower-right block, and 2 on
        the diagonal of the off-diagonal blocks (presumably core-router
        links -- verify against the simulator's input spec)."""
#        print 'performer.configure_topology:', self.BENCHMARK, self.NODE_COUNT, self.EDGE_COUNT
        adjacency = to_numpy_matrix(graph, dtype = int, nonedge = -1)
#        print adjacency.astype(int)
        all_disconnected = full((self.NODE_COUNT, self.NODE_COUNT), -1, int)
        side = full((self.NODE_COUNT, self.NODE_COUNT), -1, int)
        fill_diagonal(side, 2)
        configuration = hstack((vstack((all_disconnected, side)), vstack((side, adjacency))))
        configuration = configuration.astype(int)
        savetxt(topology_file, configuration, fmt='%d', delimiter='\t')
        return
    def initialize_files(self, dataset):
        """(Re)create a dataset file containing only its header row."""
        columns = self.ATTRIBUTES + self.METADATA
        with open(dataset, 'w+') as f:
            f.write('\t'.join(map(str, columns)) + '\n')
        return
    def draw_graph(self, title_name, graph, architecture):
        """Render the topology at its grid positions and save the figure."""
        figure()
        title(title_name)
        draw(graph, get_node_attributes(graph, 'position'), hold = True)
#        draw_networkx_edge_labels(graph, get_node_attributes(graph, 'position'), alpha = 0.2)
        savefig(performer.file_name('topology_view', architecture))
        return
    def plot_bar(self, dataframe, index, columns, attribute):
        """Pivot `dataframe` and save a grouped bar chart of `attribute`."""
        data = dataframe[[index, columns, attribute]]
        print 'performer.plot_bar: ', index, columns, attribute
        data = data.pivot(index, columns, attribute)
        # Per-attribute y-axis floor chosen to keep the bars readable.
        ymin = None
        if attribute == 'latency':
            ymin = 70
        if attribute == 'energy':
            ymin = 3e-10
        if attribute == 'average_path_length':
            ymin = 10
        if attribute == 'average_hop_count':
            ymin = 2
        axis = data.plot(kind ='bar', edgecolor='none', rot=0, ylim=ymin)
        axis.set_ylabel(attribute)
        axis.legend(loc='upper left', bbox_to_anchor=(1,1))
        axis.get_figure().savefig(self.file_name(attribute, index), bbox_inches='tight')
        return
    def plot_histogram(self, dataframe, column, value):
        """Save a histogram of `value` with one series per row of `dataframe`."""
        figure()
        distributions = DataFrame()
        for index1, row in dataframe.iterrows():
            # Bin from 0 up past the maximum observed value.
            bin_count = max(row[value]) + 3
            new_column = DataFrame({row[column]: Series(histogram(row[value], bins = range(bin_count))[0])})
            distributions = concat([distributions, new_column], axis = 1)
        print 'performer.plot_histogram: ', column, value
        axis = distributions.plot(kind = 'bar', edgecolor = 'none', rot=0)
        axis.set_xlabel(value)
        axis.get_figure().savefig(self.file_name(value, 'distribution'))
        return
    def plot_trace(self, data):
        """Plot the per-benchmark running minima of each target over trials.

        NOTE(review): calls self.plot_line, which is not defined anywhere in
        this file -- invoking this method would raise AttributeError.
        """
        result = DataFrame()
        attributes = ['benchmark'] + self.TARGETS
        for benchmark in self.BENCHMARKS:
            evolution = data[data['benchmark'] == benchmark][attributes].cummin().reindex()
            evolution['trial'] = evolution.index
            result = concat([result, evolution], ignore_index = True)
        for metric in self.TARGETS:
            self.plot_line(result, 'trial', 'benchmark', metric)
        return
    def plot_figures(self, results):
        """Produce the full set of comparison figures from the results table."""
        for attribute in ['latency', 'energy', 'average_path_length', 'average_hop_count']:
            performer.plot_bar(results, 'benchmark', 'architecture', attribute)
        mask = results['benchmark'] == 'canneal'
        performer.plot_histogram(results[mask], 'architecture', 'link_lengths')
        for architecture in performer.ARCHITECTURES:
            optimum_data = results[results['architecture'] == architecture].iloc[0]
            performer.draw_graph(optimum_data['architecture'], optimum_data['graph'], architecture)
        return
# Module-level singleton shared by Optimization and the worker functions below.
performer = Performer()
class Optimization(SearchProblem):
    """simpleai local-search problem whose states are topology graphs.

    Neighbors are generated by trimming the state down below the edge budget
    and then adding every possible single edge that keeps the constraints
    satisfied; state quality is the (negated) SVR-estimated target metric.
    """
    def actions(self, state):
        """Return all constraint-satisfying one-edge-added neighbors of `state`."""
        successors = []
        successor1 = state.copy()
        # Randomly remove edges until strictly under the budget, making room
        # for the edge added below.
        while successor1.number_of_edges() >= performer.EDGE_COUNT:
            victim = random.choice(successor1.edges())
            successor1.remove_edge(victim[0], victim[1])
        for source in state.nodes():
            for destination in state.nodes():
                successor = successor1.copy()
                successor.add_edge(source, destination, length = performer.link_length(source, destination),
                                   weight = performer.edge_weight(source, destination))
                successor.remove_edges_from(successor.selfloop_edges())
                if performer.constraints_satisfied(successor):
                    successors.append(successor)
        print 'optimization.actions:', len(successors)
        return successors
    def result(self, state, action):
        # Each action IS the successor graph; applying it just returns it.
        return action
    def value(self, state):
        """Estimated quality of `state` (higher is better; see evaluate_quality)."""
        raw_features = performer.extract_features(state)
        estimated_metrics = performer.estimate_metrics(raw_features)
        estimated_quality = performer.evaluate_quality(estimated_metrics)
        return estimated_quality
#        return (-performer.weighted_average_path_length(performer.TRAFFIC, state))
#        return (-average_shortest_path_length(state, 'weight'))
def analyze():
    """Collect every architecture's test dataset and produce the figures.

    Uses pre-0.20 pandas APIs (DataFrame.sort, .ix) -- this code targets the
    old pandas the rest of the file assumes.
    """
    data = DataFrame()
    for architecture in performer.ARCHITECTURES:
        performer.initialize('test', architecture, 8, 112)
        print performer.DATASET_TEST
        data1 = read_csv(performer.DATASET_TEST, sep='\t')
        data = concat([data, data1], ignore_index=True)
    data.sort('benchmark', inplace = True)
    print 'analyze:', data.columns.values
#    results = data.ix[data.groupby(['architecture', 'benchmark'])['latency'].idxmin()]
    # NOTE: `results` aliases `data` -- the derived columns added below also
    # appear on `data`.
    results = data
    results['graph'] = [performer.string_to_graph(t) for t in results['topology']]
    results['average_hop_count'] = [average_shortest_path_length(g) for g in results['graph']]
    results['link_lengths'] = [get_edge_attributes(g, 'length').values() for g in results['graph']]
    mask = (results['architecture'] == 'small_world') | (results['architecture'] == 'optimum')
    print data[mask][['architecture', 'benchmark', 'latency']]
    performer.plot_figures(results)
#    for normalized_attribute, attribute in zip(performer.NORMALIZED_ATTRIBUTES, performer.ATTRIBUTES):
#        normlized_values = []
#        for index, row in results.iterrows():
#            mesh_index = (results['architecture'] == 'mesh') & (results['benchmark'] == row['benchmark'])
#            normlized_values.append(row[attribute]/squeeze(results[mesh_index][attribute]))
#        results[normalized_attribute] = normlized_values
    return
def design(thread_id):
    """Generate one topology per benchmark for the configured architecture and
    record its simulated metrics in the design dataset.

    thread_id -- worker index; keys the per-thread topology/log file names so
                 pool workers do not clobber each other's files.
    Raises NameError for an unrecognized performer.ARCHITECTURE.
    """
#    performer.initialize_files(performer.DATASET_DESIGN)
    architecture = performer.ARCHITECTURE
#    benchmark = 'combined'
    for benchmark in performer.BENCHMARKS:
        # alpha drawn uniformly in [0, 8); beta fixed at 0 (traffic ignored
        # in the edge-sampling probabilities).
        performer.reinitialize(thread_id, benchmark, 'latency', uniform(0, 8), 0)
#        performer.update_estimators(4)
        if architecture == 'mesh':
            graph = performer.generate_grid_graph()
        elif architecture == 'random':
            graph = performer.generate_random_graph()
        elif architecture == 'small_world':
            graph = performer.generate_small_world_graph()
        elif architecture == 'test':
            # Re-read a previously written topology file; the adjacency block
            # occupies the last NODE_COUNT rows/columns (see configure_topology).
            raw_topology = loadtxt('topology_test.tsv', int)[-performer.NODE_COUNT:, -performer.NODE_COUNT:]
            # +1 turns the -1 "no link" sentinel into 0 (non-edge) for
            # from_numpy_matrix.
            graph = from_numpy_matrix(raw_topology + 1)
            performer.process_graph(graph)
        elif architecture == 'optimum':
            optimization = Optimization(initial_state=performer.generate_small_world_graph())
#            optimization = Optimization(initial_state=performer.generate_grid_graph())
            final = hill_climbing(optimization)
            graph = final.state
        else:
            raise NameError('unknown architecture: ' + architecture)
        # Fixed: identity comparison against None (was `graph != None`).
        if graph is not None:
            performer.update_database(performer.DATASET_DESIGN, architecture, graph)
    return
def infinite_design(thread_id):
    """Run design() forever; worker body for the 'infinite_design' objective."""
    while True:
        design(thread_id)
def test(benchmark):
    """Re-simulate the best design-dataset topology under `benchmark` and
    record the result in the test dataset.

    Note: `benchmark` is also passed as the thread_id, so per-thread file
    names are keyed by benchmark when run through pool.map.
    """
    performer.initialize_files(performer.DATASET_TEST)
    data = read_csv(performer.DATASET_DESIGN, sep='\t')
    # Row with the overall minimum latency (pre-0.20 pandas .ix indexing).
    results = data.ix[data['latency'].idxmin()]
    data.sort('average_path_length', inplace = True)
    graph = performer.string_to_graph(results['topology'])
    performer.reinitialize(benchmark, benchmark, 'latency', uniform(0, 8), 0)
    performer.update_database(performer.DATASET_TEST, performer.ARCHITECTURE, graph)
    return
def combine():
    """Print the minimum-latency row per (architecture, benchmark) pair."""
    data = read_csv(performer.DATASET_DESIGN, sep='\t')
    data = data.ix[data.groupby(['architecture', 'benchmark'])['latency'].idxmin()]
    data = data[['benchmark', 'latency']]
    print data
def view():
    """Print the small-world design dataset sorted by average path length."""
    data = read_csv('dataset_small_world_design.tsv', sep = '\t')
    print data.columns.values
    data.sort('average_path_length', inplace = True)
    print data[['architecture', 'benchmark', 'latency', 'average_path_length', 'alpha', 'beta']]
    return
if __name__ == '__main__':
    # Pipeline stage selector; edited by hand before each run.
    objective = 'combine'
    # 8x8 grid (64 nodes) with an edge budget of 112.
    performer.initialize(objective, 'optimum', 8, 112)
    thread_count = 24
    pool = Pool(thread_count)
    if objective == 'design':
#        design(8)
        pool.map(design, range(thread_count))
    if objective == 'infinite_design':
        pool.map(infinite_design, range(thread_count))
    if objective == 'test':
#        for benchmark in performer.BENCHMARKS:
#            test(benchmark)
        pool.map(test, performer.BENCHMARKS)
    if objective == 'analyze':
        analyze()
        # Rebuild the accompanying LaTeX report with the fresh figures.
        check_call(['pdflatex', 'architect'])
    if objective == 'view':
        view()
    if objective == 'combine':
        combine()