Example #1
def stopProducer():
    try:
        # Kill the tmux session hosting the producer's socket.
        producer.run('tmux kill-session -t socket')
        # Run the collection script remotely and download its result file.
        transfer = Transfer(producer)
        transfer.put('./retrieveProducerOutput.py')
        producer.run('python3 ~/retrieveProducerOutput.py')
        transfer.get('producerResult.txt')
        # Remove the intermediate data files on the remote host.
        producer.run('rm ~/data/_*')
    except Exception:
        print('Socket already closed!')
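All of these snippets assume Fabric 2.x, where a Transfer wraps a Connection, plus module-level connection objects (producer, master, kafka, host, slaveConnections, ...) defined elsewhere. A minimal setup sketch; the host names and user are placeholder assumptions:

from fabric import Connection
from fabric.transfer import Transfer

# Placeholder addresses and user; the remaining connections follow the same pattern.
producer = Connection('producer.example.com', user='ronald')
master = Connection('master.example.com', user='ronald')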
Example #2
def pgdump(c):
    # Note: uses the module-level `host` connection rather than the task
    # argument `c`.
    # Find the twelve-character ID of the running database container.
    cid = host.run(
        'docker container ls | grep awsdemo_db | head -c 12').stdout.strip()
    # Dump the database inside the container and compress it.
    host.run(
        '''docker container exec %s sh -c "pg_dump -U awsdemo awsdemo | gzip > '/var/lib/postgresql/backups/awsdemo.gz'"'''
        % cid)
    # Copy the dump out of the container, then download it locally.
    host.run(
        'docker cp %s:/var/lib/postgresql/backups/awsdemo.gz /tmp/awsdemo.gz' %
        cid)
    t = Transfer(host)
    t.get('/tmp/awsdemo.gz')
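The unused c argument suggests pgdump is meant to be invoked as a Fabric task. A sketch of registering it in a fabfile.py, using the standard Fabric 2.x task decorator (its use here is an assumption):

from fabric import task

@task
def pgdump(c):
    ...  # body as in the example above

# Invoke from the shell with:
#   fab pgdump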
Example #3
def trainModel():

    # Transfer handles for the master and Kafka hosts
    transfer = Transfer(master)
    transferKafka = Transfer(kafka)
    # Upload the data generator to the Kafka host
    transferKafka.put('./kafkaProducer.py')
    # Start Kafka
    startKafka()
    # Start the Spark cluster
    startSparkCluster()
    # Build the Spark application package
    os.system('sbt package')
    # Pull the generated CSVs from the Kafka host, then push them to the master
    transferKafka.get('/home/ronald/random_centers.csv')
    transfer.put('./random_centers.csv')
    transferKafka.get('/home/ronald/centers.csv')
    transfer.put('./centers.csv')
    transferKafka.get('/home/ronald/data.csv')
    transfer.put('./data.csv')

    # Upload the Spark application and submit it to the cluster
    transfer.put(
        './target/scala-2.12/streamingkmeansmodeltrained_2.12-0.1.jar')
    master.run(
        'source /etc/profile && cd $SPARK_HOME && bin/spark-submit '
        '--packages org.apache.spark:spark-streaming-kafka-0-10_2.12:3.0.0 '
        '--class example.stream.StreamingKMeansModelTraining '
        '--master spark://' + str(masterHost) + ':7077 --executor-memory 2g '
        '~/streamingkmeansmodeltrained_2.12-0.1.jar '
        '192.168.122.121:9092 '
        'consumer-group '
        'test')
    runChecker()
    stop()
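Example #3 leans on helpers (startKafka, startSparkCluster, runChecker, stop) and a masterHost global that are not shown. A hypothetical sketch of startKafka, assuming the broker is launched in a detached tmux session in the same style as Example #1 (the session name and paths are assumptions):

def startKafka():
    # Hypothetical: run the broker in a detached tmux session so run()
    # returns immediately while Kafka keeps running on the remote host.
    kafka.run('tmux new-session -d -s kafka '
              '"~/kafka/bin/kafka-server-start.sh ~/kafka/config/server.properties"')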
Example #4
def transferLogs():
    # Download each slave's log under a unique local name.
    for counter, connection in enumerate(slaveConnections, start=1):
        transfer = Transfer(connection)
        transfer.get('logs/log.csv', 'log_slave' + str(counter) + '.csv')
    transfer = Transfer(master)
    transfer.get('logs/log.csv', 'log_master.csv')
    transfer = Transfer(producer)
    transfer.get('logs/log.csv', 'log_producer.csv')
Example #5
def transfer_logs_out():
    # Download every connection's log under a numbered local name.
    for counter, connection in enumerate(all_connections, start=1):
        transfer = Transfer(connection)
        transfer.get('logs/log.csv', 'log' + str(counter) + '.csv')
Example #6
def pgdump(c):
    # Find the ID of the app's Postgres container.
    cid = host.run('docker container ls | grep ' + APP_NAME.lower() +
                   '_postgres | head -c 12').stdout.strip()
    # Dump and compress the database inside the container.
    host.run('''docker container exec %s sh -c "pg_dump -U %s %s | gzip > '/var/lib/postgresql/backups/%s.gz'"'''
             % (cid, DB_USER, DB_DB, DB_DB))
    # Copy the dump out of the container, then download it locally.
    host.run('docker cp %s:/var/lib/postgresql/backups/%s.gz /tmp/%s.gz' %
             (cid, DB_DB, DB_DB))
    t = Transfer(host)
    t.get('/tmp/%s.gz' % DB_DB)
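This variant parameterizes the same dump with module-level constants. A sketch of the assumed definitions (the values are placeholders borrowed from Example #2):

APP_NAME = 'awsdemo'   # used to locate the '<name>_postgres' container
DB_USER = 'awsdemo'    # Postgres role performing the dump
DB_DB = 'awsdemo'      # database to dump
host = Connection('db.example.com')  # connection to the Docker host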
Example #7
def transferFromSlave(filename):
    transfer = Transfer(slaveConnections[0])
    transfer.get(filename)
Example #8
def transferFromServer(filename):
    transfer = Transfer(conn)
    transfer.get(filename)
Example #9
def retrieveProducerOutput():
    # Download the Spark event log for this application run from the master.
    transfer = Transfer(master)
    transfer.get('/home/ronald/tmp/spark-events/app-20201105205551-0000')
Example #10
def transferFromMaster(filename):
    transfer = Transfer(master)
    transfer.get(filename)
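Examples #7, #8 and #10 are thin wrappers around Transfer.get. A usage sketch, assuming the connections above exist (the file paths are placeholders):

transferFromMaster('results/output.csv')  # saved as ./output.csv locally
transferFromSlave('logs/log.csv')         # from the first slave connection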