Example #1
File: make.py  Project: ADALabUCSD/SLAB
import os
import sys

# resolve the project root from the environment; abort early if it is not set
project_root = os.getenv('BENCHMARK_PROJECT_ROOT')
if project_root is None:
    msg = 'Please set environment variable "BENCHMARK_PROJECT_ROOT"'
    raise StandardError(msg)

externals = {
    'lib': '/lib',
    'disk_data': '/tests/SimpleMatrixOps (Disk Data)/output'
}
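# symlink shared library code and generated test data into ../external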
for name, rel_path in externals.items():
    os.symlink(project_root + rel_path, '../external/' + name)

sys.path.append('../external/lib/python')
import make_utils as utils
import global_params as params

# start logging
start_make_logging()

# compile
makelog = '../../output/make.log'
utils.run_sbt('./spark', makelog=makelog)

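# fetch the raw input data, then run the Spark precleaning job on it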
utils.run_python(program='get_data.py')
os.putenv('SAVE_STUB', '_1')
utils.run_spark(program='SparkPreclean',
                sbt_dir='./spark',
                cmd_args='/scratch/day_1.gz true')

# stop logging
end_make_logging()
Example #2
        systems))
args = parser.parse_args()

# start logging
start_make_logging()

test_type = args.test_type
nodes = args.nodes
sparsity = args.sparsity
systems = args.systems
op_types = args.operators
sparse_gb = args.sparse_gb

# compile
makelog = '../../output/make.log'
utils.run_sbt('./systemml', makelog=makelog)
utils.run_sbt('./mllib', makelog=makelog)

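# dispatch to the node-scaling or matrix-size-scaling driver based on TEST_TYPE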
if test_type == 'scale_nodes':
    utils.run_python(program='node_scaling_tests.py',
                     cmd_args='{} "{}" "{}" "{}" {}'.format(
                         nodes, sparsity, systems, op_types, sparse_gb))
elif test_type == 'scale_mat':
    utils.run_python(program='msize_scaling_tests.py',
                     cmd_args='{} "{}" "{}" "{}" {}'.format(
                         nodes, sparsity, systems, op_types, sparse_gb))
else:
    raise StandardError('TEST_TYPE must be one of: "scale_nodes", "scale_mat"')

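# remove the temporary scratch_space directory left behind by the tests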
remove_dir('scratch_space')