def QuerySchema(self, descriptors):
    """Return a Schema aggregated from every source that *descriptors* needs.

    Builds the aggregation script for the requested descriptors, queries
    each referenced source for its schema, and merges them into one Schema.
    """
    script, ids = self._AggregatorScriptFor(descriptors)
    merged = Schema()
    sourceSchemas = []
    for sid in ids:
        sourceSchemas.append(self.sources[sid].QuerySchema())
    Aggregator(cStringIO.StringIO(script)).run(merged, sourceSchemas)
    return merged
def test_aggregate(self):
    """Aggregate two fixture directories and assert the CSV output is non-empty.

    NOTE(review): paths are hard-coded to a developer machine — this test
    only runs where those fixture directories exist.
    """
    outFileName = "/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/testOut.csv"
    # 'with' guarantees the handle is closed even if aggregation raises;
    # the original left outFile open on failure and never closed outReadFile.
    with open(outFileName, 'w') as outFile:
        outFile.write(CONSTS.HEADER)
        Aggregator.aggregate("/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/test1/", "test1", outFile)
        Aggregator.aggregate("/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/test2/", "test2", outFile)
    with open(outFileName, 'r') as outReadFile:
        self.assertGreater(len(outReadFile.read()), 0)
def test_aggregate(self):
    """Aggregate two fixture directories and assert the CSV output is non-empty.

    NOTE(review): paths are hard-coded to a developer machine — this test
    only runs where those fixture directories exist.
    """
    outFileName = "/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/testOut.csv"
    # 'with' guarantees the handle is closed even if aggregation raises;
    # the original left outFile open on failure and never closed outReadFile.
    with open(outFileName, 'w') as outFile:
        outFile.write(CONSTS.HEADER)
        Aggregator.aggregate(
            "/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/test1/",
            "test1", outFile)
        Aggregator.aggregate(
            "/Users/Tushar/Documents/Research/PuppetQuality/Puppet-lint_aggregator/test2/",
            "test2", outFile)
    with open(outFileName, 'r') as outReadFile:
        self.assertGreater(len(outReadFile.read()), 0)
def UpdateDescriptors(self, id, pool, descriptors=None):
    """Push the descriptors held in *pool* back into the underlying sources.

    For each source with a non-empty disgregation script, merge *pool* into
    that source's existing descriptors for *id* (or into an empty Pool if
    the source has none yet) and write the result back.

    :param id: identifier of the item whose descriptors are updated.
    :param pool: Pool carrying the new descriptor values.
    :param descriptors: descriptors to push; defaults to everything
        present in *pool*.
    """
    if descriptors is None:  # fix: identity test, not '== None'
        descriptors = pool.PresentAttributes()
    scripts = self._DisgregatorScripts(descriptors)
    for source, script in scripts.items():
        if script == "":
            continue
        disgregator = Aggregator(cStringIO.StringIO(script))
        try:
            result = self.sources[source].QueryDescriptors(id)
        except Exception:
            # Best-effort fallback (as before) when the source has no
            # descriptors for this id, but the narrowed clause no longer
            # swallows KeyboardInterrupt/SystemExit.
            result = Pool()
        disgregator.run(result, [pool])
        self.sources[source].UpdateDescriptors(id, result)
def QueryDescriptors(self, id, descriptors): if self.verbose: print "++ Building aggregation script..." (aggregatorScript, sourceIds) = self._AggregatorScriptFor(descriptors) aggregator = Aggregator(cStringIO.StringIO(aggregatorScript)) result = Pool() sourcesPools = [] for sourceId in sourceIds: if self.verbose: print "++ Querying descriptors from %s..." % sourceId sourcePool = self.sources[sourceId].QueryDescriptors(id) sourcesPools.append(sourcePool) if self.verbose: print "++ Aggregating..." aggregator.run(result, sourcesPools) return result
def load():
    """Populate the module-level databases from Aggregator.load().

    Also normalises P_list to a set and filters target_context.
    """
    global Ci_Database
    global C_Ci_Database
    global P_list
    global Ci_Accumulated_Weights
    global Ci_Accumulated_Distance
    global C_token_Database
    global token_Database
    global token_Accumulated_Weights
    global token_Accumulated_Distance
    global target_context
    # Unpack straight into the globals — no intermediate 'results' tuple.
    (Ci_Database, C_Ci_Database, P_list,
     Ci_Accumulated_Weights, Ci_Accumulated_Distance,
     C_token_Database, token_Database,
     token_Accumulated_Weights, token_Accumulated_Distance,
     target_context) = Aggregator.load()
    P_list = set(P_list)
    target_context = filterContext(target_context)
import os

import Aggregator
from SmellDetector import Constants as CONSTS, Analyzer

root = CONSTS.REPO_ROOT

# Pass 1: run the analyzer over every repository directory under root.
print("Initiating Analyzer...")
totalRepos = len(os.listdir(root))
currentItem = 0
for item in os.listdir(root):
    currentFolder = os.path.join(root, item)
    if not os.path.isfile(currentFolder):
        Analyzer.analyze(currentFolder, item)
    currentItem += 1
    print("{:.2f}".format(float(currentItem * 100) / float(totalRepos)) + "% analysis done.")
print("Analyzer - Done.")

# Pass 2: aggregate per-repository metrics/smells into a single CSV.
print("Initiating metrics and smells aggregator...")
# 'with' closes the CSV even if an aggregate() call raises;
# os.path.join replaces the manual root + "/" concatenation.
with open(os.path.join(root, CONSTS.AGGREGATOR_FILE), 'wt') as aggregatedFile:
    aggregatedFile.write(CONSTS.CSV_HEADER)
    for item in os.listdir(root):
        currentFolder = os.path.join(root, item)
        if not os.path.isfile(currentFolder):
            Aggregator.aggregate(currentFolder, item, aggregatedFile)
print("Metrics and smells aggregator - Done.")
############################################################################### else: status = MPI.Status() while True: # receive message work = comm.recv(source=0, tag = MPI.ANY_TAG,status=status) # check tag of received message if status.Get_tag() == DIETAG: break # do the work resultz = np.array(work) cosa = max(resultz.shape) #resultz[0,:].shape print('cosa', cosa) if cosa==5: # cosa[0] resultpp = resultz[np.ix_([0],[1,2,3])] state_agg = resultz[np.ix_([0],[4])] resultp = Aggregator.mapping_inverse(resultpp,state_agg) #np.ones(3) elif cosa==8: #cosa[0] resultpp = resultz[np.ix_([0],[1,2,3])] state_agg = resultz[np.ix_([0],[4])] pred = resultz[np.ix_([0],[5,6,7])] (resultp,actives) = Aggregator.mapping(resultpp,state_agg,pred) #print(actives) else: resultpp = resultz[np.ix_([0],[1,2,3])] state_agg = resultz[np.ix_([0],[4])] pred = resultz[np.ix_([0],[5,6,7])] (resultp,actives) = Aggregator.mapping(resultpp,state_agg,pred) mm = 1.0 - actives #resultp = np.c_[mm,[valls]] resulto = np.array(resultz[0][0])
# NOTE(review): the statements below up to the first decorator appear to be
# the tail of a background update loop whose 'def' is outside this view.
if sanity_check_counter == int(sanity_interval / interval):
    # Periodically re-emit the previous content as a sanity refresh.
    content = previous_content
    sanity_check_counter = 0
if len(content) != 0:
    socketio.emit('newdata', content, namespace='/api')
socketio.sleep(interval)


@app.route('/')
def hello_world():
    """Serve the dashboard page."""
    return render_template('index.html')


@socketio.on('slider', namespace='/api')
def slider(data):
    """Log slider updates from the client; no server-side state is changed."""
    print('slider value updated: %s' % data.get('value'))


@socketio.on('connect', namespace='/api')
def connect():
    """Push the current aggregated content to a newly connected client."""
    socketio.emit('newdata', aggregator.get_content(), namespace='/api')


if __name__ == '__main__':
    aggregator = Aggregator.Aggregator()
    # The monitored command defaults to a demo script when argv gives none.
    command = sys.argv[1] if len(sys.argv) > 1 else "python3 ./test.py"
    aggregator.register_component(command)
    aggregator.start_gathering()
    socketio.start_background_task(target=update)
    socketio.run(app, host='0.0.0.0', port=8080)
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from Aggregator import *
from Schema import *

import sys

# "-" on the command line selects stdin as the aggregation script source.
script = sys.stdin if sys.argv[1] == "-" else file(sys.argv[1])
sources = sys.argv[2:]

# Run the script over the schemas named on the command line and dump the
# merged result to stdout.
target = Schema()
Aggregator(script).run(target, [Schema(file(source)) for source in sources])
target.Dump(sys.stdout)
def test(self): t = tableau_test() a = Aggregator() a.init(self.f.compute) print a.result(t)
import os

import Aggregator
from SmellDetector import Constants as CONSTS, Analyzer

root = CONSTS.REPO_ROOT

# Pass 1: run the analyzer over every repository directory under root.
print("Initiating Analyzer...")
totalRepos = len(os.listdir(root))
currentItem = 0
for item in os.listdir(root):
    currentFolder = os.path.join(root, item)
    if not os.path.isfile(currentFolder):
        Analyzer.analyze(currentFolder, item)
    currentItem += 1
    print("{:.2f}".format(float(currentItem * 100) / float(totalRepos)) + "% analysis done.")
print("Analyzer - Done.")

# Pass 2: aggregate per-repository metrics/smells into a single CSV.
print("Initiating metrics and smells aggregator...")
# 'with' closes the CSV even if an aggregate() call raises;
# os.path.join replaces the manual root + "/" concatenation.
with open(os.path.join(root, CONSTS.AGGREGATOR_FILE), 'wt') as aggregatedFile:
    aggregatedFile.write(CONSTS.CSV_HEADER)
    for item in os.listdir(root):
        currentFolder = os.path.join(root, item)
        if not os.path.isfile(currentFolder):
            Aggregator.aggregate(currentFolder, item, aggregatedFile)
print("Metrics and smells aggregator - Done.")
# Program entry point.
from Generator import *
import tensorflow as tf
from Discriminator import *
from Aggregator import *
from DataPrep import *

# TODO: Add support for altering images (e.g. flip image, etc.).
# All ops are for 3d tensors, so something like this has to be used:
# result = tf.map_fn(lambda img: tf.image.random_flip_left_right(img), images)
# TODO: Add leaky relu.

with tf.Session() as sess:
    batchSize = 64
    numIters = 500
    gen = Generator(batchSize)
    discrim = Discriminator(batchSize, gen)
    a = Aggregator(sess, discrim)
    saver = tf.train.Saver()
    try:
        saver.restore(sess, "savedModel.ckpt")
        print("Successfully Restored Model!!")
    except Exception:
        # fix: narrowed the bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; a failed restore falls back to fresh
        # initialisation exactly as before.
        sess.run(tf.global_variables_initializer())
        print("No model available for restoration")
    allData = loadAllData()
    a.learn(allData, numIters, batchSize)
    saver.save(sess, "savedModel.ckpt")
def helperTestParser(self, input):
    """Round-trip *input* through the Aggregator parser and return its dump."""
    out = cStringIO.StringIO()
    Aggregator(cStringIO.StringIO(input)).dump(out)
    return out.getvalue()
def helperTestParser(self, input):
    """Round-trip *input* through the Aggregator parser and return its dump."""
    out = cStringIO.StringIO()
    Aggregator(cStringIO.StringIO(input)).dump(out)
    return out.getvalue()
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from Aggregator import *
from Pool import *

import sys

# "-" on the command line selects stdin as the aggregation script source.
script = sys.stdin if sys.argv[1] == "-" else file(sys.argv[1])
sources = sys.argv[2:]

# Run the script over the pools named on the command line and dump the
# merged result to stdout.
target = Pool()
Aggregator(script).run(target, [Pool(file(source)) for source in sources])
target.Dump(sys.stdout)
7.You can freely analyze the code, and propose any changes
8.Parts of this code cannot be used to any other software creating without written permission of i2
July 2012, Krakow Poland
"""
import wx
import Aggregator as Agg_module


class Agg_GUI(Agg_module.MyDialog):
    # Thin subclass: forwards the Visum instance V to the generated dialog.
    def __init__(self, V):
        Agg_module.MyDialog.__init__(self, V)


stand_alone = True
try:
    # If 'Visum' is already defined (i.e. we run embedded inside Visum),
    # we are not stand-alone.  NOTE(review): stand_alone mixes True and 0 —
    # it works via truthiness, but is fragile.
    Visum
    stand_alone = 0
except:
    # Outside Visum: create our own instance from the hard-coded .ver file.
    Visum = Agg_module.Visum_Init("D:/agr.ver")

if __name__ == "__main__":
    # NOTE(review): nesting reconstructed from a collapsed source line —
    # confirm the APNR block is meant to run in embedded mode too, since
    # 'app' only exists when stand_alone is truthy.
    if stand_alone:
        app = wx.PySimpleApp(0)
        wx.InitAllImageHandlers()
    APNR = Agg_GUI(Visum)
    app.SetTopWindow(APNR)
    APNR.Show()
    if stand_alone:
        app.MainLoop()
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from Aggregator import *
from Pool import *

import sys

# "-" on the command line selects stdin as the aggregation script source.
script = sys.stdin if sys.argv[1] == "-" else file(sys.argv[1])
sources = sys.argv[2:]

# Run the script over the pools named on the command line and dump the
# merged result to stdout.
target = Pool()
Aggregator(script).run(target, [Pool(file(source)) for source in sources])
target.Dump(sys.stdout)
############################################################################### else: status = MPI.Status() while True: # receive message work = comm.recv(source=0, tag=MPI.ANY_TAG, status=status) # check tag of received message if status.Get_tag() == DIETAG: break # do the work resultz = np.array(work) cosa = max(resultz.shape) #resultz[0,:].shape #print('cosa', cosa) if cosa == 5: # cosa[0] resultpp = resultz[np.ix_([0], [1, 2, 3])] state_agg = resultz[np.ix_([0], [4])] resultp = Aggregator.mapping_inverse(resultpp, state_agg) #np.ones(3) elif cosa == 8: #cosa[0] resultpp = resultz[np.ix_([0], [1, 2, 3])] state_agg = resultz[np.ix_([0], [4])] pred = resultz[np.ix_([0], [5, 6, 7])] (resultp, actives, momentsmat, labdist, polprime) = Aggregator.mapping(resultpp, state_agg, pred) #print(actives) else: resultpp = resultz[np.ix_([0], [1, 2, 3])] state_agg = resultz[np.ix_([0], [4])] pred = resultz[np.ix_([0], [5, 6, 7])] (resultp, actives, momentsmat, labdist, polprime) = Aggregator.mapping(resultpp, state_agg, pred) mm = 1.0 - actives #resultp = np.c_[mm,[valls]]