def __init__(self, output_file, log_packets=True, log_queue_drops=True,
             log_arrivals=True, log_queue_lengths=False, log_states=False):
    """
    Constructor.
    :param output_file: output file name; overwritten if it already exists
    :param log_packets: enable/disable logging of packets (RECEIVED/CORRUPTED)
    :param log_queue_drops: enable/disable logging of packet drops
    :param log_arrivals: enable/disable logging of packet arrivals
    :param log_queue_lengths: enable/disable logging of queue lengths
    :param log_states: enable/disable logging of the state of nodes
    """
    self.sim = sim.Sim.Instance()
    self.log_packets = log_packets
    self.log_queue_drops = log_queue_drops
    self.log_arrivals = log_arrivals
    self.log_queue_lengths = log_queue_lengths
    self.log_states = log_states

    # open the output file and write the CSV header
    path = locate(output_file)
    mkdir_for_file(path)
    self.log_file = open(path, "w")
    self.log_file.write("time,src,dst,event,size\n")
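# Usage sketch for the constructor above. The enclosing class is not shown
# in this snippet; "Log" and the event row below are illustrative only.
#
#   log = Log('../output/output.csv', log_packets=True)
#   log.log_file.write('0.125,3,7,RECEIVED,1024\n')
#   log.log_file.close()
#
# Rows follow the header written in __init__: time,src,dst,event,size.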
def main(): """ Process the data generated by one or more versions of the simulator. """ # compute the location of the CSV files csv_folder = locate('../output/') # compute the location for the processing results_folder = locate('../results/') mkdir(results_folder) # compute the statistics # use cache if available, otherwise load data from raw CSV aggregated_file = results_folder + 'statistics.h5' if not os.path.isfile(aggregated_file): print('Loading CSV files...') all_statistics = process_csv_raw_files(csv_folder) all_statistics.to_hdf(aggregated_file, 'statistics', format='fixed') else: print('Using cached statistics...') all_statistics = read_hdf(aggregated_file) # get rid of the seeds (take the average over all seeds) mean_stats = all_statistics \ .groupby(['id', 'propagation', 'simulator', 'p', 'dst', 'load', 'lambda'], as_index=False) \ .mean() \ .reset_index(level=3, drop=True) \ .drop('seed', 1) # make sure the plots folder exists plots_folder = locate('../results/plots/') mkdir(plots_folder) # plot graphs for each simulator print("Plotting individual statistics...") plots.individual_statistic(mean_stats, plots_folder) # compute aggregated statistic for each version of the simulator print("Aggregated stats by simulator and load...") pro = aggregate_statistics(mean_stats) print("Plotting aggregated statistics...") plots.aggregated_statistics(pro, plots_folder) # store aggregated statistic in a file pro.to_hdf(results_folder + 'summary.h5', 'summary', format='table')
def _on_import_dir(self, evt):
    dlg = wx.DirDialog(self, "Choose a directory:", style=wx.DD_DEFAULT_STYLE)
    if dlg.ShowModal() == wx.ID_OK:
        path = dlg.GetPath()
        paths = utils.locate(path)
        # import every DICOM file found under the chosen directory
        for path in paths:
            if path.endswith('.dcm') or path.endswith('.DCM'):
                self.client.import_query(path)
    dlg.Destroy()
def register_module(module, testClass):
    """
    Register a python module and prepare a testClass with all contained
    scenario files.
    """
    path = os.path.dirname(module.__file__)
    __import__(module.__name__ + ".rules")
    # attach one test method per scenario file found in the module's directory
    for f in locate('*.scenario', path):
        setattr(
            testClass,
            'test_' + normalize(f, path),
            create_test_wrap(open(f))  # file objects iterate over lines
        )
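# Minimal, self-contained sketch of the pattern used by register_module:
# attaching generated test_* methods to a TestCase so a unittest runner
# discovers one test per scenario file. All names here are illustrative.
import unittest

class ScenarioTests(unittest.TestCase):
    pass

def make_scenario_test(lines):
    def test(self):
        self.assertTrue(len(lines) > 0)  # stand-in for real scenario checks
    return test

setattr(ScenarioTests, 'test_example_scenario',
        make_scenario_test(['step one', 'step two']))
# run with: python -m unittest <this_module>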
def main(): """ Plot the network topology. """ config = load_config('config.json') nodes = config['simulation']['nodes'][0] _range = config['simulation']['range'] # plot topology directory = locate('../plots/topology/') mkdir(directory) for f in ['png', 'eps']: draw_nodes(nodes, _range, directory + 'topology.' + f)
def handle(self, *args, **options):
    '''Read the install folder and install its contents.'''
    force = options.get('force')
    if args:
        if args[0] == "install":
            print("Installing resources from %s" % settings.INSTALL_FOLDER)
            if not os.path.exists(settings.INSTALL_FOLDER):
                # create the folder and any missing parents
                # (replaces the shelled-out "mkdir -vp")
                os.makedirs(settings.INSTALL_FOLDER)
            for resource_pack in utils.locate('*.zip', settings.INSTALL_FOLDER):
                print("######" * 5)
                print("INSTALLING: %s" % resource_pack)
                if utils.ImportItem(resource_pack):
                    print("INSTALLED: %s" % resource_pack)
                else:
                    print("ERROR: %s" % resource_pack)
    else:
        print("You must provide at least 'install' as argument")
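# Invocation sketch for the Django management command above (the command
# name comes from the module this handle() lives in; shown as a placeholder):
#   python manage.py <command_name> install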
def collisions(path, name, _id=None):
    """
    Load the collision rate of a single simulation.

    :param path: H5 file to load (relative location).
    :param name: Name of the simulation.
    :param _id: Optional: query the dataframe by id.
    :return: Triple <load, collision rate, name>.
    """
    # load statistics
    statistics = pandas.read_hdf(utils.locate(path), 'summary')
    if _id is not None:
        statistics = statistics.query('id == "%s"' % _id)
    assert len(statistics.id.unique()) == 1

    # extract the wanted data
    load = list(statistics.load)
    cr = list(statistics.cr)

    # return the triple
    return load, cr, name
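# Example call (the path matches the summary.h5 written by main() above;
# the id and the plotting line are illustrative):
#   load, cr, name = collisions('../results/summary.h5', 'aloha', _id='run-1')
#   plt.plot(load, cr, label=name)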
def pdf_filepath(self, bibtexkey):
    '''Find a PDF filename that matches bibtexkey.'''
    path = expanded_path('pdf', bibtexkey=bibtexkey)
    if path is not None:
        found = glob.glob(path, recursive=True)
    else:
        found = []
    # fall back to a filesystem-wide locate if the glob found nothing
    if not found and config['preferences'].getboolean('pdf_locate'):
        fname = bibtexkey + '.pdf'
        found = locate(r'\/' + fname + '$', case_sensitive=True, as_regex=True)
    if found:
        return found[0]
    return None
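# Standalone sketch of the first lookup strategy above: a recursive glob
# for the key's PDF. The path is illustrative; the regex-based locate()
# fallback is project-specific and not reproduced here.
import glob

matches = glob.glob('/home/user/papers/**/Smith2020.pdf', recursive=True)
pdf = matches[0] if matches else None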
def create_x_y(data, vocab, stats, test=False, verbose=False):
    '''
    Create input-output pairs for neural network training. Each example is
    a tuple (span, question, left context, right context, label).
    '''
    data = filter_vocab(data, vocab, stats)
    max_span = stats['max_span']
    max_q = stats['max_q']
    surround_size = stats['surround_size']
    neg_samples = stats['neg_samples']
    ivocab = create_idict(vocab)
    X = []

    def print_sentence(name, sen):
        if verbose:
            print(name, ' '.join([ivocab[v] for v in sen if v]))

    def map_vocab(word):
        # map out-of-vocabulary words to the <unk> token
        return vocab[word] if word in vocab else vocab['<unk>']

    try:
        for paragraph in data:
            context = paragraph['context.tokens']
            all_spans = sum(paragraph['spans'], [])
            for qa in paragraph['qas']:
                # encode the question as a fixed-length vector of word ids
                q = np.zeros(max_q)
                for i, word in enumerate(qa['question.tokens']):
                    if i >= len(q):
                        break
                    q[i] = map_vocab(word)

                def extract(pos, span, is_answer=False):
                    if verbose:
                        print('is_answer', is_answer)
                        print_sentence('question', q)
                    # encode the span
                    s = np.zeros(max_span)
                    for i, word in enumerate(span):
                        if i >= len(s):
                            break
                        s[i] = map_vocab(word)
                    print_sentence('span', s)
                    # encode the left and right surrounding context
                    answer_start = pos
                    cl = np.zeros(surround_size)
                    cr = np.zeros(surround_size)
                    for i in range(surround_size):
                        ind = answer_start - 1 - i
                        if ind >= 0:
                            cl[i] = map_vocab(context[ind])
                    print_sentence('cl', cl)
                    for i in range(surround_size):
                        ind = answer_start + len(span) + i
                        if ind < len(context):
                            cr[i] = map_vocab(context[ind])
                    print_sentence('cr', cr)
                    if verbose:
                        print()
                    return (s, q, cl, cr)

                # positive examples: the annotated answers (label 1)
                if not test:
                    for answer in qa['answers']:
                        X.append(extract(answer['answer_start'],
                                         answer['text.tokens'],
                                         is_answer=True) + (1.,))

                # negative examples: sampled spans (label 0);
                # at test time, use all spans instead of a sample
                spans = choice(all_spans, neg_samples, replace=True)
                if test:
                    spans = all_spans
                for span in spans:
                    pos = locate(context, span)
                    X.append(extract(pos, span) + (0.,))
    except Exception as e:
        # report the failure but keep any examples built so far
        print(e)
        traceback.print_exc()

    if not test:
        random.shuffle(X)
    return X
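# Self-contained sketch of the fixed-length padding used for q, s, cl and
# cr above: copy token ids into a zero vector, truncating at the maximum
# length. The ids below are illustrative.
import numpy as np

def pad_ids(ids, max_len):
    out = np.zeros(max_len)
    for i, v in enumerate(ids):
        if i >= max_len:
            break
        out[i] = v
    return out

print(pad_ids([5, 9, 2], 5))  # [5. 9. 2. 0. 0.]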
def buildvrts(self):
    """Extracts image files and merges them as necessary."""
    layerIDs = [name for name in os.listdir(self.mapsdir)
                if os.path.isdir(os.path.join(self.mapsdir, name))]
    if not layerIDs:
        raise IOError('No files found')
    for layerID in layerIDs:
        (pType, iType, mType, cType) = self.decodeLayerID(layerID)
        filesuffix = cType.lower()
        layerdir = os.path.join(self.mapsdir, layerID)
        compfiles = [name for name in os.listdir(layerdir)
                     if (os.path.isfile(os.path.join(layerdir, name))
                         and name.endswith(filesuffix))]
        for compfile in compfiles:
            (compbase, compext) = os.path.splitext(compfile)
            fullfile = os.path.join(layerdir, compfile)
            datasubdir = os.path.join(layerdir, compbase)
            compfile = '%s.%s' % (compbase, iType)
            # tar (at least) expects Unix pathnames
            compimage = '/'.join([compbase, compfile])
            cleanmkdir(datasubdir)
            if not Region.zipfileBroken:
                if cType == "tgz":
                    cFile = tarfile.open(fullfile)
                elif cType == "zip":
                    cFile = zipfile.ZipFile(fullfile)
                cFile.extract(compimage, layerdir)
                cFile.close()
            else:
                if cType == "tgz":
                    cFile = tarfile.open(fullfile)
                    cFile.extract(compimage, layerdir)
                elif cType == "zip":
                    # work around the broken zipfile: extract into a scratch
                    # directory, then move the image where tar would put it
                    omfgcompimage = os.path.join(compbase, compfile)
                    os.mkdir(os.path.dirname(os.path.join(datasubdir, compimage)))
                    cFile = zipfile.ZipFile(fullfile)
                    cFile.extract(omfgcompimage, datasubdir)
                    os.rename(os.path.join(datasubdir, omfgcompimage),
                              os.path.join(layerdir, compimage))
                cFile.close()
            # reproject the extracted image into the region's SRS
            rawfile = os.path.join(layerdir, compbase, compfile)
            goodfile = os.path.join(layerdir, compbase,
                                    "%s.good%s" % (compbase, iType))
            warpcmd = 'gdalwarp -q -multi -t_srs "%s" %s %s' % \
                (Region.t_srs, rawfile, goodfile)
            os.system(warpcmd)
        # merge all reprojected images for this layer into a single VRT
        vrtfile = os.path.join(layerdir, '%s.vrt' % layerID)
        buildvrtcmd = 'gdalbuildvrt %s %s' % \
            (vrtfile, ' '.join(['"%s"' % x
                                for x in locate('*.good*', root=layerdir)]))
        os.system(buildvrtcmd)
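# Shape of the two external GDAL commands issued above (the file names and
# the target SRS are illustrative):
#   gdalwarp -q -multi -t_srs "EPSG:4326" raw.tif raw.good.tif
#   gdalbuildvrt layer.vrt "tile1.good.tif" "tile2.good.tif"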