def runProduction():
    """Launch the brisbane1-T4000-BP L2area production run via Spark.

    Clears any stale output Avro from HDFS, then submits the prodLauncher
    job with spark-submit on the cluster master. The slave cap is set to
    half the cluster's core count (2 threads per slave below).
    Relies on the module-level ``cluster`` handle for all remote calls.
    """
    nCores = cluster.getNCores()
    nSlaves = cluster.getNSlaves()
    # Each slave runs 2 threads (--nbThreads 2), so cap slaves at cores/2.
    maxSlaves = int(nCores / 2)
    # Remove previous output so the job does not abort on an existing path.
    cluster.runOnMaster(
        'hdfs dfs -rm -r -f /brisbane1-T4000-BP__BP_R150cm_T2000.avro')
    command = ' '.join([
        'spark-submit',
        '--jars /mnt/data/package/softs/jars/spark-avro_2.10-2.0.1-SNAPSHOT.jar',
        'irt/processor/scripts/prodLauncher.py',
        'runSparkAvro brisbane1-T4000-BP',
        '--mode cluster',
        '--block 1000',
        '--band P',
        '--res 1.5',
        '--modeStore memory',
        '--tile 2000',
        '--nbThreads 2',
        '--nbSlaves {0}'.format(nSlaves),
        '--maxSlaves {0}'.format(maxSlaves),
        '--modeProduction L2area'])
    cluster.runOnMasterX(command)
def runBigPerou():
    """Launch the perou-T2000-BP L2area production run via Spark.

    Submits the prodLauncher job with spark-submit on the cluster master,
    assuming the input Avro has already been written to HDFS. The slave
    cap is half the cluster's core count (2 threads per slave below).
    Relies on the module-level ``cluster`` handle for all remote calls.
    """
    nCores = cluster.getNCores()
    nSlaves = cluster.getNSlaves()
    # Each slave runs 2 threads (--nbThreads 2), so cap slaves at cores/2.
    maxSlaves = int(nCores / 2)
    command = ' '.join([
        'spark-submit',
        '--jars /mnt/data/package/softs/jars/AvroToPythonConverters-1.3.0.jar',
        'irt/processor/scripts/prodLauncher.py',
        'runSparkAvro perou-T2000-BP',
        '--mode cluster',
        '--block 1000',
        '--band P',
        '--res 1',
        '--tile 2000',
        '--nbThreads 2',
        '--nbSlaves {0}'.format(nSlaves),
        '--maxSlaves {0}'.format(maxSlaves),
        '--modeProduction L2area'])
    cluster.runOnMasterX(command)
def viewPerou():
    """View the perou production result with the L2 Geospy viewer.

    Submits prodL2GeospyResult.py against the produced Avro on the
    cluster master and reports the elapsed wall-clock time.
    Relies on the module-level ``cluster`` handle, plus the helpers
    ``getCurrentTime`` and ``printElapsed`` defined elsewhere in this file.
    """
    timeStart = getCurrentTime()
    command = ' '.join([
        'spark-submit',
        '--jars /mnt/data/package/softs/jars/AvroToPythonConverters-1.3.0.jar',
        'irt/processor/prodL2GeospyResult.py perou-T2000-BP__BP_R100cm_T2000.avro'])
    cluster.runOnMasterX(command)
    printElapsed("Viewing", getCurrentTime() - timeStart)