def QuerySchema(self, descriptors):
    """Build and return the merged Schema for *descriptors*.

    Compiles an aggregation script for the requested descriptors, queries
    the schema of every contributing source, and merges them all into a
    single Schema via the aggregator.
    """
    script, sourceIds = self._AggregatorScriptFor(descriptors)
    merger = Aggregator(cStringIO.StringIO(script))
    merged = Schema()
    sourceSchemas = []
    for sourceId in sourceIds:
        sourceSchemas.append(self.sources[sourceId].QuerySchema())
    merger.run(merged, sourceSchemas)
    return merged
 def UpdateDescriptors(self, id, pool, descriptors=None):
     """Write the attribute values held in *pool* back to their sources.

     Parameters:
         id: identifier of the item whose descriptors are being updated.
         pool: Pool holding the new attribute values.
         descriptors: attribute names to propagate; defaults to every
             attribute present in *pool*.
     """
     if descriptors is None:  # identity test, not ``== None``
         descriptors = pool.PresentAttributes()
     scripts = self._DisgregatorScripts(descriptors)
     for source, script in scripts.items():
         if script == "":
             continue  # nothing to push to this source
         disgregator = Aggregator(cStringIO.StringIO(script))
         try:
             result = self.sources[source].QueryDescriptors(id)
         except Exception:
             # Narrowed from a bare ``except:``: keep the best-effort
             # fallback to an empty Pool, but no longer swallow
             # KeyboardInterrupt/SystemExit.
             result = Pool()
         disgregator.run(result, [pool])
         self.sources[source].UpdateDescriptors(id, result)
 def QueryDescriptors(self, id, descriptors):
     if self.verbose: print "++ Building aggregation script..."
     (aggregatorScript, sourceIds) = self._AggregatorScriptFor(descriptors)
     aggregator = Aggregator(cStringIO.StringIO(aggregatorScript))
     result = Pool()
     sourcesPools = []
     for sourceId in sourceIds:
         if self.verbose:
             print "++ Querying descriptors from %s..." % sourceId
         sourcePool = self.sources[sourceId].QueryDescriptors(id)
         sourcesPools.append(sourcePool)
     if self.verbose: print "++ Aggregating..."
     aggregator.run(result, sourcesPools)
     return result
Ejemplo n.º 4
0
        if sanity_check_counter == int(sanity_interval / interval):
            content = previous_content
            sanity_check_counter = 0
        if len(content) != 0:
            socketio.emit('newdata', content, namespace='/api')
        socketio.sleep(interval)


@app.route('/')
def hello_world():
    """Serve the dashboard page."""
    page = render_template('index.html')
    return page


@socketio.on('slider', namespace='/api')
def slider(data):
    """Log slider updates pushed by the client over the '/api' namespace."""
    value = data.get('value')
    print('slider value updated: %s' % value)


@socketio.on('connect', namespace='/api')
def connect():
    """Push the current aggregated content to a newly connected client."""
    snapshot = aggregator.get_content()
    socketio.emit('newdata', snapshot, namespace='/api')


if __name__ == '__main__':
    # Wire up the aggregator, start the background polling task, then
    # serve the SocketIO app.
    aggregator = Aggregator.Aggregator()
    if len(sys.argv) > 1:
        command = sys.argv[1]
    else:
        command = "python3 ./test.py"
    aggregator.register_component(command)
    aggregator.start_gathering()
    socketio.start_background_task(target=update)
    socketio.run(app, host='0.0.0.0', port=8080)
Ejemplo n.º 5
0
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from Aggregator import *
from Pool import *
import sys

# Read the aggregation script either from stdin ("-") or from the file
# named by the first argument; the remaining arguments name source pools.
if sys.argv[1] == "-":
    script = sys.stdin
else:
    # open() instead of the Python-2-only file() builtin (removed in 3.x).
    script = open(sys.argv[1])
sources = sys.argv[2:]

target = Pool()
aggregator = Aggregator(script)
# NOTE(review): source handles are left for the runtime to reclaim, as
# before; Pool may read lazily, so closing them eagerly here is not
# proven safe from this file alone — confirm against Pool's constructor.
aggregator.run(target, [Pool(open(source)) for source in sources])

target.Dump(sys.stdout)
Ejemplo n.º 6
0
 def helperTestParser(self, input):
     """Parse *input* with an Aggregator and return its dumped text form."""
     parser = Aggregator(cStringIO.StringIO(input))
     out = cStringIO.StringIO()
     parser.dump(out)
     return out.getvalue()
Ejemplo n.º 7
0
#Program Entry Point
from Generator import *
import tensorflow as tf
from Discriminator import *
from Aggregator import *
from DataPrep import *

#Add support for altering images. (ie flip image. etc..)
#All ops are for 3d tensors, so something like this has to be used..
#result = tf.map_fn(lambda img: tf.image.random_flip_left_right(img), images)
#Add leaky relu
with tf.Session() as sess:
    batchSize = 64
    numIters = 500

    # Build the model pieces: generator, discriminator over it, and the
    # Aggregator that drives training within this session.
    gen = Generator(batchSize)
    discrim = Discriminator(batchSize, gen)
    a = Aggregator(sess, discrim)

    saver = tf.train.Saver()
    try:
        saver.restore(sess, "savedModel.ckpt")
        print("Successfully Restored Model!!")
    except Exception:
        # Narrowed from a bare ``except:``: still falls back to fresh
        # variable initialization when no checkpoint is restorable, but
        # no longer swallows KeyboardInterrupt/SystemExit.
        sess.run(tf.global_variables_initializer())
        print("No model available for restoration")

    allData = loadAllData()

    # Train, then checkpoint the resulting model.
    a.learn(allData, numIters, batchSize)
    saver.save(sess, "savedModel.ckpt")