Example #1
def generate_tests(args, classpath, class_list_file, test_src_dir, junit_after_path, rc_classpath, time_limit=200, output_limit=4000):

  # Methods to be omitted due to non-determinism.
  omitted_methods = "\"(org\\.la4j\\.operation\\.ooplace\\.OoPlaceKroneckerProduct\\.applyCommon)|(PseudoOracle\\.verifyFace)|(org\\.znerd\\.math\\.NumberCentral\\.createRandomInteger)|(org\\.jbox2d\\.common\\.MathUtils\\.randomFloat.*)|(org\\.jbox2d\\.utests\\.MathTest\\.testFastMath)|(org\\.jbox2d\\.testbed\\.tests\\.DynamicTreeTest.*)|(org\\.la4j\\.Matrix.*)\""

  selection_log_file = "dljc-out/selection-log.txt"
  operation_log_file = "dljc-out/operation-history-log.txt"
  randoop_log_file = "dljc-out/randoop-log.txt"

  randoop_command = ["java", "-ea",
                     "-classpath", classpath,
                     "-Xbootclasspath/a:{}".format(rc_classpath),
                     "-javaagent:{}".format(rc_classpath),
                     "randoop.main.Main", "gentests",
                     "--classlist={}".format(class_list_file),
                     "--time-limit={}".format(time_limit),
                     "--omitmethods={}".format(omitted_methods),
                     "--junit-reflection-allowed=false",
                     "--flaky-test-behavior=DISCARD",
                     "--usethreads=true",
                     "--call-timeout=5",
                     "--silently-ignore-bad-class-names=true",
                     "--junit-output-dir={}".format(test_src_dir),
                     # Uncomment the following line to produce the full Randoop debugging log
                     #"--log={}".format(randoop_log_file),
                     "--selection-log={}".format(selection_log_file),
                     "--operation-history-log={}".format(operation_log_file)]

  if junit_after_path:
    randoop_command.append("--junit-after-all={}".format(junit_after_path))

  if output_limit and output_limit > 0:
    randoop_command.append('--output-limit={}'.format(output_limit))

  common.run_cmd(randoop_command, args, 'randoop')
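
Nearly every example on this page delegates to a run_cmd helper whose definition is not shown, and whose signature varies between the source projects (a command list plus args and a tool name in the do-like-javac examples, a plain shell string elsewhere). A minimal sketch consistent with the do-like-javac call sites — Examples #39 and #57 expect a dict with 'return_code' and 'output' — might look like this; the real helper may differ:

import subprocess

def run_cmd(cmd, args=None, tool=None):
    # Hypothetical stand-in for common.run_cmd; the real implementation may
    # add logging, timeouts and per-tool output files.
    try:
        out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        return {'return_code': 0, 'output': out.decode('utf-8', 'replace')}
    except subprocess.CalledProcessError as e:
        return {'return_code': e.returncode,
                'output': e.output.decode('utf-8', 'replace')}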
Example #2
def compile_test_cases(args, classpath, test_class_directory, files_to_compile):
  compile_command = ["javac", "-g",
                     "-classpath", classpath,
                     "-d", test_class_directory]
  compile_command.extend(files_to_compile)

  common.run_cmd(compile_command, args, 'randoop')
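
Taken together with Example #1, a hypothetical call site might compile the Randoop-generated suites like so (the output directory and file pattern are illustrative; args, classpath and test_src_dir as in Example #1):

import glob
import os

compile_test_cases(args, classpath, "build/test-classes",
                   glob.glob(os.path.join(test_src_dir, "RegressionTest*.java")))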
Example #3
def run(args, javac_commands, jars):
  if not args.graph_jar:
    print "Could not run graph tool: missing arg --graph-jar"
    return

  tool_command = ["java", "-jar", args.graph_jar]

  for jc in javac_commands:
    java_files = jc['java_files']
    java_files_file = os.path.join(os.getcwd(), '__java_file_names.txt')

    class_dir = common.class_directory(jc)

    with open(java_files_file, 'w') as f:
      for s in java_files:
        f.write(s)
        f.write("\n")

    current_outdir = os.path.join(args.output_directory,
                                  class_dir.replace(os.getcwd(),'').replace(os.sep,"_"))

    cmd = tool_command + ["-o", current_outdir,
                          "-j", class_dir,
                          "-all",
                          "-source", java_files_file]

    common.run_cmd(cmd)
Example #4
def run(args, javac_commands, jars):
  if not args.time_limit:
    args.time_limit = 2  

  if not args.test_limit:
    args.test_limit = 2  

  if not args.out_dir:
    args.out_dir = "./dyntrace_output"

  current_dir = os.path.dirname(os.path.realpath(__file__))
  current_dir = os.path.join(current_dir, os.pardir)
  current_dir = os.path.join(current_dir, os.pardir)
  current_dir += "/"


  dyntrace_command = ["java", "-jar", os.path.join(current_dir, "build/libs/DynSlicer.jar")]

  for jc in javac_commands:
    # args.time_limit and args.test_limit default to ints above; pass them as
    # strings since they end up as command-line arguments.
    cmd = dyntrace_command + [common.classpath(jc), common.class_directory(jc),
                              args.out_dir, current_dir,
                              str(args.time_limit), str(args.test_limit)]

    common.run_cmd(cmd)
Example #5
def run(args, javac_commands, jars):
  bixie_jar = os.path.join(args.lib_dir, "bixie.jar")

  base_command = ["java",
                  "-jar", bixie_jar,
                  "-html", os.path.join(args.output_directory, 'bixie_report')]

  i = 1

  for jc in javac_commands:
    cmd = copy.copy(base_command)

    if common.classpath(jc):
      cmd.extend(["-cp", common.classpath(jc)])

    if common.class_directory(jc):
      cmd.extend(["-j", common.class_directory(jc)])

    if common.source_path(jc):
      cmd.extend(['-src', common.source_path(jc)])

    out_filename = 'bixie-report-{}.log'.format(i)
    cmd.extend(['-o', os.path.join(args.output_directory, out_filename)])

    common.run_cmd(cmd, args, 'bixie')
    i = i + 1
def add_project_to_corpus(project):
  """ Assumes that the project_dir contains a
  text file named build_command.txt that contains the build command(s) for the
  project in this directory, and a clean_command.txt that will clean the project.
  """
  common.clean_project(project)

  """Run dljc
  Run Randoop to generate test sources
  Compile test sources
  Run daikon.Chicory on tests to create dtrace file
  Precompute graph kernels that are independent of ontology stuff
  """
  common.run_dljc(project,
                  ['dyntrace', 'graphtool'],
                  ['--graph-jar', common.get_jar('prog2dfg.jar'),
                   '--dyntrace-libs', common.LIBS_DIR])

  """ run petablox """
  #run_petablox(project_dir)

  """ run graph kernel computation """
  project_dir = common.get_project_dir(project)
  kernel_file_path = common.get_kernel_path(project)
  graph_kernel_cmd = ['python',
                      common.get_simprog('precompute_kernel.py'),
                      project_dir,
                      kernel_file_path
                      ]
  common.run_cmd(graph_kernel_cmd)
  print('Generated kernel file for {0}.'.format(project))
  return kernel_file_path
Example #7
def run(args, javac_commands, jars):
    # the dist directory of CFI.
    CFI_dist = os.path.join(os.environ['JSR308'], 'checker-framework-inference', 'dist')
    CFI_command = ['java']

    print(os.environ)

    for jc in javac_commands:
        target_cp = jc['javac_switches']['classpath'] + \
            ':' + os.path.join(args.lib_dir, 'ontology.jar')

        cp = target_cp + \
             ':' + os.path.join(CFI_dist, 'checker.jar') + \
             ':' + os.path.join(CFI_dist, 'plume.jar') + \
             ':' + os.path.join(CFI_dist, 'com.microsoft.z3.jar') + \
             ':' + os.path.join(CFI_dist, 'checker-framework-inference.jar')

        if 'CLASSPATH' in os.environ:
            cp += ':' + os.environ['CLASSPATH']

        cmd = CFI_command + ['-classpath', cp,
                             'checkers.inference.InferenceLauncher',
                             '--solverArgs', args.solverArgs,
                             '--cfArgs', args.cfArgs,
                             '--checker', args.checker,
                             '--solver', args.solver,
                             '--mode', args.mode,
                             '--hacks=true',
                             '--targetclasspath', target_cp,
                             '--logLevel=WARNING',
                             '-afud', args.afuOutputDir]
        cmd.extend(jc['java_files'])

        common.run_cmd(cmd, args, 'infer')
def main(options):
    server_version = options.server_version
    scanner_version = options.scanner_version
    src_of_the_plugins = options.plugins_folder
    src_of_the_project = options.projects_folder
    noa = options.number_of_attempts
    wait = options.wait
    system = platform.system()
    dst = options.client_folder
    print_log_files = options.print_log
    common.mkdir(dst)

    # 0, a) Try to build the plugins with 'build.py'

    if system == 'Windows':
        common.run_cmd('py', ['-3', 'build.py', '--all'])
    elif system == 'Linux':
        common.run_cmd('python3', ['tools/build.py', '--all'])

    if options.init:
        # 0, b) download sonar-server

        download_sq_server(server_version, dst)

        # 1) download sonar-scanner

        download_sq_scanner(scanner_version, system, dst)

        # 2) unzip both server and scanner

        src = os.path.join(dst, 'sonarqube-%s.zip' % server_version)
        unzip(src, dst)
        if 'Windows' == system:
            src = os.path.join(dst, 'sonar-scanner-cli-%s-windows.zip' % scanner_version)
        elif 'Linux' == system:
            src = os.path.join(dst, 'sonar-scanner-cli-%s-linux.zip' % scanner_version)
        unzip(src, dst)

    # 3) copy the plugins into the server dir

    path = [dst, 'sonarqube-%s' % server_version, 'extensions', 'plugins']
    path = os.path.join(*path)
    copy_all_files_from_folder(src_of_the_plugins, path)

    # 4) start the server with the default config

    start_sq_server(server_version, system, dst)

    # 5) Validate that the server started successfully
    # 6) Analyze the given project

    sleep(60)
    if validate_running_of_sq_server(server_version, noa, wait):
        print('SonarQube started properly!')
    else:
        print('SonarQube did not start in time (-noa=%s (number of attempts))' % noa)
        if print_log_files:
            print_log(server_version, dst)
        exit(1)
def run_petablox(project):
  with common.cd(common.get_project_dir(project)):
    petablox_cmd = ['java',
                    '-cp', common.get_jar('petablox.jar'),
                    '-Dpetablox.reflect.kind=none',
                    '-Dpetablox.run.analyses=cipa-0cfa-dlog',
                    'petablox.project.Boot']
    common.run_cmd(petablox_cmd)
Example #10
def run_chicory(chicory_classpath, classes_to_include, main_class, out_dir):
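  # NOTE: classes_to_include is accepted but never used by this variant.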
  chicory_command = ["java",
                     "-classpath", chicory_classpath,
                     "daikon.Chicory",
                     "--output_dir={}".format(out_dir),
                     main_class]

  common.run_cmd(chicory_command)
def recompile_checker_framework():
  if not os.environ.get('JAVA_HOME'):
    print "ERROR in pa2checker.recompile_checker_framework(): Gradle will fail if your JAVA_HOME environment variable is unset. Please set it and try again."
    sys.exit(0)
  type_infer_tool_dir = os.path.join(common.TOOLS_DIR, "checker-framework-inference")
  with common.cd(type_infer_tool_dir):
    common.setup_checker_framework_env()
    common.run_cmd(["gradle", "dist", "-i"], print_output=True)
Example #12
def run(args, javac_commands, jars):
    for jc in javac_commands:
        pprint.pprint(jc)

        class_path = jc['javac_switches']['classpath']

        cmd = get_tool_command(args, class_path, jc['java_files'])

        common.run_cmd(cmd, args, 'check')
Example #13
def compile_test_cases(compile_classpath, test_class_directory, files_to_compile):
  if not os.path.exists(test_class_directory):
    os.mkdir(test_class_directory)

  compile_command = ["javac", "-g",
                     "-classpath", compile_classpath,
                     "-d", test_class_directory]
  compile_command.extend(files_to_compile)

  common.run_cmd(compile_command)
def run():
    dbcfgs = json.loads(dbcfgs_json)

    TRAF_DIR = '%s-%s' % (dbcfgs['traf_basename'], dbcfgs['traf_version'])

    # untar traf package
    TRAF_PACKAGE_FILE = '/tmp/' + dbcfgs['traf_package'].split('/')[-1]
    run_cmd('mkdir -p %s' % TRAF_DIR)
    run_cmd('tar xf %s -C %s' % (TRAF_PACKAGE_FILE, TRAF_DIR))

    print('Trafodion package extracted successfully!')
def run():
    dbcfgs = json.loads(dbcfgs_json)

    traf_dirname = dbcfgs['traf_dirname']

    # untar traf package, package comes from copy_files.py
    traf_package_file = '/tmp/' + dbcfgs['traf_package'].split('/')[-1]
    run_cmd('mkdir -p ~/%s' % traf_dirname)
    run_cmd('tar xf %s -C ~/%s' % (traf_package_file, traf_dirname))

    print('Trafodion package extracted successfully!')
Example #16
def run(args, javac_commands, jars):
    # checker-framework javac.
    javacheck = os.environ['JSR308']+"/checker-framework/checker/bin/javac"
    checker_command = [javacheck, "-processor", args.checker]

    for jc in javac_commands:
        pprint.pprint(jc)
        javac_switches = jc['javac_switches']
        cp = javac_switches['classpath']
        # Pass the source files as separate arguments; joining them into one
        # space-separated string would produce a single bogus file name.
        cmd = checker_command + ["-classpath", cp] + jc['java_files']
        common.run_cmd(cmd, args, 'check')
Example #17
def usersguide():
    if platform.system() == 'Windows':
        common.run_cmd('py', ['-3', 'generatedoc.py', '-css', 'style\\SourceMeter.css', '-html'],
                False, 'doc/usersguide')
    else:
        common.run_cmd('python3', ['generatedoc.py', '-css', 'style/SourceMeter.css', '-html'],
                False, 'doc/usersguide')
    try:
        shutil.copy('doc/usersguide/results/UG.html', 'doc/UG.html')
        shutil.copy('doc/UG.html', 'src/sonarqube-gui-plugin/src/main/resources/static/help/usersguide.html')
    except OSError:
        print('Cannot copy usersguide. Please check if it was generated successfully.')
Example #18
def daikon_print_xml(args, classpath, out_dir):
  daikon_command = ["java", "-Xmx4G",
                    "-classpath", classpath,
                    "daikon.PrintInvariants",
                    "--wrap_xml",
                    "--output", os.path.join(out_dir, "invariants.xml"),
                    os.path.join(out_dir, "invariants.gz")]

  common.run_cmd(daikon_command, args, 'daikon')
  js = jsoninv.generate_json_invariants(args, out_dir)
  with open(os.path.join(out_dir, 'invariants.json'), 'w') as f:
    json.dump(js, f)
Example #19
def run_dyncomp(args, classpath, main_class, out_dir, selects=[], omits=[]):
    dyncomp_command = [
        "java", "-Xmx3G", "-classpath", classpath, "daikon.DynComp",
        "--approximate-omitted-ppts", "--output-dir={}".format(out_dir)
    ]

    if no_jdk:
        dyncomp_command.append("--rt-file=none")
    dyncomp_command.extend(selects)
    dyncomp_command.extend(omits)
    dyncomp_command.append(main_class)

    common.run_cmd(dyncomp_command, args, 'dyncomp')
Example #21
def analyze(scanner_version, project_folder, system, dst):
    cwd = os.getcwd()
    if system == 'Windows':
        scanner_location = cwd + '\\' + dst + '\\sonar-scanner-%s-windows\\bin\\sonar-scanner.bat' % scanner_version
        os.chdir(project_folder)
        common.run_cmd('start', [scanner_location])
    elif system == 'Linux':
        cmd = cwd + '/' + dst + '/sonar-scanner-%s-linux/bin/sonar-scanner&' % scanner_version
        os.chdir(project_folder)
        common.run_cmd(cmd, [])
    os.chdir('..')
Example #22
def process_run(run_number):
    processing_time = time.time()
    verbose_msg("> starting run", run_number)
    run_cmd(f"bash runner{run_number}.sh")
    aod_name = f"AODRun5.{run_number}.root"
    if not os.path.isfile(aod_name):
        msg(f"++ something went wrong for run {run_number}, no output AOD file {aod_name} found.",
            f"Please check: 'AODRun5.{run_number}.log'",
            color=bcolors.FAIL)
    verbose_msg("< complete run", run_number)
    processing_time = time.time() - processing_time
    verbose_msg(f"-- took {processing_time} seconds --",
                color=bcolors.BOKGREEN)
def run():
    dbcfgs = json.loads(dbcfgs_json)
    if 'APACHE' in dbcfgs['distro']:
        modcfgs = ParseJson(MODCFG_FILE).load()
        MOD_CFGS = modcfgs['MOD_CFGS']

        hdfs_xml_file = dbcfgs['hdfs_xml_file']
        hbase_xml_file = dbcfgs['hbase_xml_file']

        hbasexml = ParseXML(hbase_xml_file)
        for key, value in MOD_CFGS['hbase-site'].items():
            hbasexml.add_property(key, value)
        hbasexml.write_xml()

        hdfsxml = ParseXML(hdfs_xml_file)
        for key, value in MOD_CFGS['hdfs-site'].items():
            hdfsxml.add_property(key, value)
        hdfsxml.write_xml()

        print('Apache Hadoop modification completed')
        first_node = dbcfgs['first_rsnode']
        local_host = socket.gethostname()
        if first_node in local_host:
            hadoop_home = dbcfgs['hadoop_home']
            hbase_home = dbcfgs['hbase_home']
            # stop
            run_cmd(hbase_home + '/bin/stop-hbase.sh')
            run_cmd(hadoop_home + '/sbin/stop-dfs.sh')
            # start
            run_cmd(hadoop_home + '/sbin/start-dfs.sh')
            run_cmd(hbase_home + '/bin/start-hbase.sh')

            print('Apache Hadoop restart completed')
    else:
        print('no apache distribution found, skipping')
Example #25
def run_chicory(args, classpath, main_class, out_dir, selects=[], omits=[]):
  chicory_command = ["java", "-Xmx3G",
                     "-classpath", classpath,
                     "daikon.Chicory",
                     "--output_dir={}".format(out_dir)]

  dc_out_path = os.path.join(out_dir, "RegressionTestDriver.decls-DynComp")
  chicory_command.append("--comparability-file={}".format(dc_out_path))

  chicory_command.extend(selects)
  chicory_command.extend(omits)
  chicory_command.append(main_class)

  common.run_cmd(chicory_command, args, 'chicory')
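
This run_chicory reads the comparability file that run_dyncomp (Example #19) writes into the same out_dir, so a hypothetical caller orders them like this (RegressionTestDriver is the driver name these examples assume):

run_dyncomp(args, classpath, "RegressionTestDriver", out_dir)  # writes RegressionTestDriver.decls-DynComp
run_chicory(args, classpath, "RegressionTestDriver", out_dir)  # reads it via --comparability-file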
Example #27
def generate_tests(randoop_classpath, class_list_file, test_src_dir, time_limit, output_limit):
  randoop_command = ["java", "-ea",
                     "-classpath", randoop_classpath,
                     "randoop.main.Main", "gentests",
                     '--classlist={}'.format(class_list_file),
                     "--timelimit={}".format(time_limit),
                     "--junit-reflection-allowed=false",
                     "--silently-ignore-bad-class-names=true",
                     '--junit-output-dir={}'.format(test_src_dir)]

  if output_limit and output_limit > 0:
    randoop_command.append('--outputlimit={}'.format(output_limit))

  common.run_cmd(randoop_command)
Example #28
def run_dyncomp(args, classpath, main_class, out_dir, selects=[], omits=[]):
  dyncomp_command = ["java", "-Xmx3G",
                     "-classpath", classpath,
                     "daikon.DynComp",
                     "--approximate-omitted-ppts",
                     "--output-dir={}".format(out_dir)]

  if no_jdk:
    dyncomp_command.append("--rt-file=none")
  dyncomp_command.extend(selects)
  dyncomp_command.extend(omits)
  dyncomp_command.append(main_class)

  common.run_cmd(dyncomp_command, args, 'dyncomp')
Example #29
def build_image(repo_path, repo_name):
    for dockerfile, image in yield_dockerfiles(repo_path, repo_name):
        try:
            build_cmd = [
                'docker', 'build', '-t', image, '-f', dockerfile, repo_path
            ]
            if os.environ.get('PULL_BASEIMAGES', '0') == '1':
                build_cmd.append('--pull')
            run_cmd(build_cmd)
        except subprocess.CalledProcessError:
            logging.error('Failed to build %s!' % dockerfile)
            sys.exit(1)

        time.sleep(1)
def run():
    hdfs_bin = '/usr/bin/hdfs'

    dbcfgs = json.loads(dbcfgs_json)
    DISTRO = dbcfgs['distro']

    if 'CDH' in DISTRO:
        parcel_lib = '/opt/cloudera/parcels/CDH/lib/hbase/lib'
        if os.path.exists(parcel_lib):
            hdfs_bin = '/opt/cloudera/parcels/CDH/bin/hdfs'
    elif 'APACHE' in DISTRO:
        hdfs_bin = dbcfgs['hadoop_home'] + '/bin/hdfs'

    traf_loc = '/user/trafodion'
    traf_user = dbcfgs['traf_user']
    hdfs_user = dbcfgs['hdfs_user']
    hbase_user = dbcfgs['hbase_user']

    run_cmd_as_user(hdfs_user, '%s dfsadmin -safemode wait' % hdfs_bin)
    run_cmd_as_user(
        hdfs_user,
        '%s dfs -mkdir -p %s/{trafodion_backups,bulkload,lobs} /bulkload /lobs /hbase/archive /hbase-staging'
        % (hdfs_bin, traf_loc))
    run_cmd_as_user(
        hdfs_user, '%s dfs -chown -R %s:%s /hbase/archive /hbase-staging' %
        (hdfs_bin, hbase_user, hbase_user))
    run_cmd_as_user(
        hdfs_user,
        '%s dfs -chown -R %s:%s %s/{trafodion_backups,bulkload,lobs} /bulkload /lobs'
        % (hdfs_bin, traf_user, traf_user, traf_loc))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m user:%s:rwx /hbase/archive' %
        (hdfs_bin, traf_user))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m default:user:%s:rwx /hbase/archive' %
        (hdfs_bin, traf_user))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m mask::rwx /hbase/archive' % hdfs_bin)

    # Grant all privileges to the Trafodion principal in HBase
    if dbcfgs['secure_hadoop'] == 'Y':
        # Pipe the grant statement into the HBase shell; without an echo the
        # outer shell would try to run "grant" as a command.
        run_cmd(
            'echo \'grant "%s", "RWXC"\' | sudo -u %s hbase shell > /tmp/hbase_shell.out'
            % (traf_user, hbase_user))
        has_err = cmd_output('grep -c ERROR /tmp/hbase_shell.out')
        if int(has_err):
            err('Failed to grant HBase privileges to %s' % traf_user)
        run_cmd('rm /tmp/hbase_shell.out')
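
run_cmd_as_user is not defined on this page; a minimal sketch consistent with the call sites above, assuming a sudo-capable root session (the real Trafodion installer helper may differ):

def run_cmd_as_user(user, cmd):
    # Hypothetical helper: run a shell command as another user.
    run_cmd('sudo su - %s -s /bin/bash -c "%s"' % (user, cmd))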
Example #31
def __download_program(program_url):
    filename = os.path.basename(program_url)
    save_path = '%s/%s' % (ypc_bin, filename)
    logger.info('Downloading analysis program to path: %s' % save_path)
    ret = common.run_cmd('wget -O %s %s' % (save_path, program_url))
    logger.info(ret)
    return save_path
Example #32
def run_minimap2(reference):
    log.info('run minimap2')
    fastq = '%s.hq.fastq' % prefix
    sam_sort = "%s.hq_isoforms.fastq.sorted.sam" % prefix
    cmd = 'shifter --image=robegan21/minimap2:2.10 minimap2 -t %s -ax splice -uf --secondary=no -C5 %s %s | sort -k 3,3 -k 4,4n > %s ' % (n_threads, reference, fastq, sam_sort)
    std_out, std_err, exit_code = run_cmd(cmd, log)
    log.info("minimap2 complete")
Example #33
def run_daikon(args, classpath, out_dir, invcounts):
  daikon_command = ["java", "-Xmx4G",
                     "-classpath", classpath,
                     "daikon.Daikon",
                     "-o", os.path.join(out_dir, "invariants.gz")]
  if invcounts:
    daikon_command.append("--config_option")
    daikon_command.append("daikon.Daikon.calc_possible_invs=true")
  if no_ternary:
    daikon_command.append("--config_option")
    daikon_command.append("daikon.inv.ternary.threeScalar.LinearTernary.enabled=false")
    daikon_command.append("--config_option")
    daikon_command.append("daikon.inv.ternary.threeScalar.LinearTernaryFloat.enabled=false")
  daikon_command.append(os.path.join(out_dir, "RegressionTestDriver.dtrace.gz"))

  common.run_cmd(daikon_command, args, 'daikon')
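
A hedged sketch of the tail of the pipeline: run_daikon writes invariants.gz, which daikon_print_xml (Example #18) then turns into XML and JSON (argument names as in those examples):

run_daikon(args, classpath, out_dir, invcounts=False)
daikon_print_xml(args, classpath, out_dir)  # from Example #18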
def proceed(handle_exit=True):
    msg(f"Downloading '{toget}'", color=bcolors.OKGREEN)
    print_now()
    if Version == 0:
        cpycmd = "alien_cp -v {} file:{}".format(toget, todir)
    else:
        cpycmd = "alien_cp -v {} file://{}".format(toget, todir)
    verbose_msg("Running command", cpycmd)
    if handle_exit:
        try:
            run_cmd(cpycmd)
        except KeyboardInterrupt:
            return False
        return True
    else:
        run_cmd(cpycmd)
        return True
def run():
    hdfs_bin = DEF_HDFS_BIN

    dbcfgs = json.loads(dbcfgs_json)
    distro = dbcfgs['distro']

    if 'CDH' in distro:
        parcel_lib = PARCEL_HBASE_LIB
        if os.path.exists(parcel_lib): hdfs_bin = PARCEL_HDFS_BIN
    elif 'APACHE' in distro:
        hdfs_bin = dbcfgs['hadoop_home'] + '/bin/hdfs'

    traf_loc = '/user/trafodion'
    traf_user = dbcfgs['traf_user']
    hdfs_user = dbcfgs['hdfs_user']
    hbase_user = dbcfgs['hbase_user']

    run_cmd_as_user(hdfs_user, '%s dfsadmin -safemode wait' % hdfs_bin)
    run_cmd_as_user(
        hdfs_user,
        '%s dfs -mkdir -p %s/{trafodion_backups,bulkload,lobs} /bulkload /lobs /hbase/archive /hbase-staging'
        % (hdfs_bin, traf_loc))
    run_cmd_as_user(
        hdfs_user, '%s dfs -chown -R %s:%s /hbase/archive /hbase-staging' %
        (hdfs_bin, hbase_user, hbase_user))
    run_cmd_as_user(
        hdfs_user,
        '%s dfs -chown -R %s:%s %s %s/{trafodion_backups,bulkload,lobs} /bulkload /lobs'
        % (hdfs_bin, traf_user, traf_user, traf_loc, traf_loc))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m user:%s:rwx /hbase/archive' %
        (hdfs_bin, traf_user))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m default:user:%s:rwx /hbase/archive' %
        (hdfs_bin, traf_user))
    run_cmd_as_user(
        hdfs_user, '%s dfs -setfacl -R -m mask::rwx /hbase/archive' % hdfs_bin)

    # Grant all privileges to the Trafodion principal in HBase
    if dbcfgs['secure_hadoop'] == 'Y':
        run_cmd(
            'echo "grant \'%s\', \'RWXC\'" | %s su - %s -s /bin/bash -c "hbase shell" > /tmp/hbase_shell.out'
            % (traf_user, get_sudo_prefix(), hbase_user))
        has_err = cmd_output('grep -c ERROR /tmp/hbase_shell.out')
        if int(has_err):
            err('Failed to grant HBase privileges to %s' % traf_user)
        run_cmd('rm /tmp/hbase_shell.out')
Example #37
def run():
    dbcfgs = json.loads(dbcfgs_json)

    nodes = dbcfgs['node_list'].split(',')
    scratch_locs = dbcfgs['scratch_locs'].split(',')

    # this script is running by trafodion user, so get sqroot from env
    traf_conf = os.environ['TRAF_CONF']
    if traf_conf == '': err('TRAF_CONF var is empty')
    sqconfig_file = traf_conf + '/sqconfig'

    traf_var = os.environ['TRAF_VAR']
    if traf_var == '': err('TRAF_VAR var is empty')
    sqconfig_db_file = traf_var + '/sqconfig.db'

    # If the configuration database file is not yet created,
    # build the 'sqconfig' file with the nodes specified and compile it.
    if not os.path.exists(sqconfig_db_file):
        core, processor = run_cmd("lscpu|grep -E '(^CPU\(s\)|^Socket\(s\))'|awk '{print $2}'").split('\n')[:2]
        core = int(core)-1 if int(core) <= 256 else 255

        lines = ['begin node\n']
        for node_id, node in enumerate(nodes):
            line = 'node-id=%s;node-name=%s;cores=0-%d;processors=%s;roles=connection,aggregation,storage\n' % (node_id, node, core, processor)
            lines.append(line)

        lines.append('end node\n')
        lines.append('\n')
        lines.append('begin overflow\n')

        for scratch_loc in scratch_locs:
            line = 'hdd %s\n' % scratch_loc
            lines.append(line)

        lines.append('end overflow\n')

        # write out the node section
        with open(sqconfig_file, 'w') as f:
            f.writelines(lines)

        print('sqconfig generated successfully!')

        run_cmd('sqgen')

        print('sqgen ran successfully!')
    else:
        print('Using existing configuration (%s)' % sqconfig_file)
Example #38
def run_o2_analysis(tmp_script_name,
                    remove_tmp_script=False,
                    explore_bad_files=False,
                    time_it=True):
    global number_of_runs
    verbose_msg("> starting run with", tmp_script_name)
    cmd = f"bash {tmp_script_name}"
    if do_bash_script:
        with open("parallelbash.sh", "a") as fout:
            with open("parallelbash.sh", "r") as fin:
                lastline = fin.readlines()[-1]
                if lastline.startswith("#"):
                    lastline = int(lastline.strip("#"))
                else:
                    lastline = 0
                fout.write(f"echo Running {lastline}\n")
                fout.write(f"{cmd} &\n")
                lastline += 1
                if lastline % (bash_parallel_jobs + 1) == 0:
                    fout.write(f"wait\n")
                fout.write(f"\n#{lastline}\n")

        return

    if explore_bad_files:
        if run_cmd(cmd, check_status=True, throw_fatal=False,
                   time_it=time_it) == False:
            list_name = os.listdir(os.path.dirname(tmp_script_name))
            for i in list_name:
                if "ListForRun5Analysis" in i:
                    list_name = i
                    break
            if type(list_name) != list:
                with open(
                        os.path.join(os.path.dirname(tmp_script_name),
                                     list_name)) as f:
                    list_name = []
                    for i in f:
                        list_name.append(i)
            warning_msg("Issue when running", tmp_script_name, "with",
                        list_name)
    else:
        run_cmd(cmd, log_file=f"{tmp_script_name}.log", time_it=time_it)
    if remove_tmp_script:
        os.remove(tmp_script_name)
    verbose_msg("< end run with", tmp_script_name)
    return tmp_script_name
Example #39
def compilable(args, java_file, classpath):
    """
    Test if a given java_file is compilable.
    Return True if compilable, False otherwise.
    """
    compile_cmd = ['javac', '-classpath', classpath, java_file]
    compile_status = common.run_cmd(compile_cmd, args, None)
    return compile_status['return_code'] == 0
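
Illustrative usage (java_files and classpath are assumed to be in scope): pre-filter a file list so a downstream tool only sees sources that compile.

compilable_files = [f for f in java_files if compilable(args, f, classpath)]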
Example #40
def _wait_ping_response(device, timeout):
    start_time = int(time.time())
    while int(time.time()) < start_time + timeout:
        rv = common.run_cmd(["ping", "-c", "1", device.ip], raise_on_err=False)
        if rv.retval == 0:
            return

    raise NoConnectionError
def get_redis_info(host, port):
    # pdb.set_trace()
    cmd = "redis-cli -h %s -p %s info" % (host, port)
    k, v = run_cmd(cmd)
    if k:
        return change_dict(v)
    else:
        print "get info form redis error host %s ,prot %s" % (host, port)
Example #42
def running_tool(args, file_list, target_classpath, log_file_path=None):
    """
    Run the debugged tool on the given file_list.
    Return the tool execution status.
    """
    tool_cmd = SUPPORT_TOOLS[args.debuggedTool].get_tool_command(args, target_classpath, file_list)
    execute_status = common.run_cmd(tool_cmd, args, log_file_path)
    return execute_status
Example #43
def run_sqanti(reference,ann):
    log.info("sqanti ...")
    if not os.path.isdir("sqanti_out"):
         os.makedirs("sqanti_out")
    #cmd = "shifter --image=mjblow/sqanti:v1 sqanti_qc.py -d sqanti_out -g {0}.isoform.collapsed.gtf {1} {2} > sqanti.log 2>&1 ".format(prefix, ann, reference)
    cmd = "shifter --image=mjblow/sqanti:v1 sqanti_qc.py -d sqanti_out -g {0}.isoform.collapsed.filtered.gtf {1} {2} > sqanti.log 2>&1 ".format(prefix, ann, reference)
    std_out, std_err, exit_code =  run_cmd(cmd, log)
    log.info("sqanti complete")
Example #44
def run():
    dbcfgs = json.loads(dbcfgs_json)

    nodes = dbcfgs['node_list'].split(',')
    scratch_locs = dbcfgs['scratch_locs'].split(',')

    # this script is running by trafodion user, so get sqroot from env
    traf_home = os.environ['TRAF_HOME']
    if traf_home == '': err('TRAF_HOME var is empty')
    sqconfig_file = traf_home + '/sql/scripts/sqconfig'
    sqconfig_persist_file = traf_home + '/sql/scripts/sqconfig.persist'

    core, processor = run_cmd(
        "lscpu|grep -E '(^CPU\(s\)|^Socket\(s\))'|awk '{print $2}'").split(
            '\n')[:2]
    core = int(core) - 1 if int(core) <= 256 else 255

    lines = ['begin node\n']
    if len(nodes) == 1:
        lines.append('_virtualnodes 2\n')
    else:
        for node_id, node in enumerate(nodes):
            line = 'node-id=%s;node-name=%s;cores=0-%d;processors=%s;roles=connection,aggregation,storage\n' % (
                node_id, node, core, processor)
            lines.append(line)

    lines.append('end node\n')
    lines.append('\n')
    lines.append('begin overflow\n')

    for scratch_loc in scratch_locs:
        line = 'hdd %s\n' % scratch_loc
        lines.append(line)

    lines.append('end overflow\n')

    # write out the node section
    with open(sqconfig_file, 'w') as f:
        f.writelines(lines)

    print('sqconfig generated successfully!')

    run_cmd('sqgen')

    print('sqgen ran successfully!')
Example #45
def _validate_plugins():
    app_name = app_metadata['APP_NAME']
    build_path = os.path.abspath(build_dir)
    pluginval_bin_path = None
    plugin_paths = []

    if platform_name == 'Darwin':
        pluginval_bin_path = os.path.join(pluginval_path, 'bin', 'mac',
                                          'pluginval.app', 'Contents', 'MacOS',
                                          'pluginval')

        for c in build_configs:
            plugin_paths += [
                os.path.join(build_path, c,
                             '{app_name}.component'.format(app_name=app_name)),
                os.path.join(build_path, c,
                             '{app_name}.vst'.format(app_name=app_name)),
                os.path.join(build_path, c,
                             '{app_name}.vst3'.format(app_name=app_name)),
            ]
    elif platform_name == 'Windows':
        pluginval_bin_path = os.path.join(pluginval_path, 'bin', 'windows',
                                          'pluginval.exe')

        for c in build_configs:
            arch_x64 = build_archs_win[1]
            plugin_paths += [
                os.path.join(build_path, arch_x64, c, 'VST',
                             '{app_name}.dll'.format(app_name=app_name)),
                os.path.join(build_path, arch_x64, c, 'VST3',
                             '{app_name}.vst3'.format(app_name=app_name)),
            ]
    elif platform_name == 'Linux':
        pluginval_bin_path = os.path.join(pluginval_path, 'bin', 'linux',
                                          'pluginval')

        plugin_paths += [
            os.path.join(build_path,
                         'lib{app_name}.so'.format(app_name=app_name))
        ]

    for p in plugin_paths:
        run_cmd(
            '{pluginval_path} --strictness-level 5 --validate "{plugin_path}"'.
            format(pluginval_path=pluginval_bin_path, plugin_path=p))
def run():
    """ install Trafodion dependencies """

    dbcfgs = json.loads(dbcfgs_json)

    node_list = dbcfgs['node_list'].split(',')

    offline = dbcfgs['offline_mode'] == 'Y'

    if offline:
        repo_content = LOCAL_REPO_PTR % (dbcfgs['repo_ip'],
                                         dbcfgs['repo_http_port'])
        with open(REPO_FILE, 'w') as f:
            f.write(repo_content)

    if not offline and not os.path.exists(EPEL_FILE):
        run_cmd('yum install -y epel-release')

    package_list = [
        'apr',
        'apr-util',
        'expect',
        'gzip',
        'libiodbc-devel',
        'lzo',
        'lzop',
        'pdsh',  # epel
        'perl-DBD-SQLite',
        'perl-Params-Validate',
        'perl-Time-HiRes',
        'protobuf',  # epel
        'sqlite',
        'snappy',
        'unixODBC-devel',
        'unzip'
    ]

    if dbcfgs['ldap_security'].upper() == 'Y':
        package_list += ['openldap-clients']

    all_pkg_list = run_cmd('rpm -qa')
    for pkg in package_list:
        if pkg in all_pkg_list:
            print('Package %s has already been installed' % pkg)
        else:
            print('Installing %s ...' % pkg)
            if offline:
                run_cmd(
                    'yum install -y --disablerepo=\* --enablerepo=traflocal %s'
                    % pkg)
            else:
                run_cmd('yum install -y %s' % pkg)

    # pdsh should not exist on a single node
    if len(node_list) == 1 and 'traf_shadow' not in dbcfgs:
        # 'traf_shadow' present means we are running in add-node mode
        cmd_output('yum remove -y pdsh')

    # remove temp repo file
    if offline: os.remove(REPO_FILE)
def get_hdfs_list(path):
    return_dict = dict()
    cmd = "hadoop fs -du -s %s" % path
    output = run_cmd(cmd)
    if output:
        return_dict[output.split()[1]] = output.split()[0]
    else:
        return_dict[path] = 0
    return return_dict
Example #49
def start_sq_server(version, system, dst):
    cwd = os.getcwd()
    if system == 'Windows':
        sonar_location = cwd + '\\' + dst + '\\' + 'sonarqube-%s\\bin\\windows-x86-64\\StartSonar.bat' % version
        common.run_cmd('start', [sonar_location])
    elif system == 'Linux':
        sonar_location = cwd + '/' + dst + '/sonarqube-%s' % version

        # make the unpacked distribution executable before launching
        for root, dirs, files in os.walk(sonar_location):
            for file in files:
                file_path = os.path.join(root, file)
                os.chmod(file_path, 0o744)

        cmd = os.path.join(sonar_location, 'bin/linux-x86-64/sonar.sh')
        common.run_cmd(cmd, ['start', '&'])
    print('Starting SQ server...')
Example #50
def generate_tests(args, classpath, class_list_file, test_src_dir, junit_after_path, time_limit=200, output_limit=4000):
  randoop_command = ["java", "-ea",
                     "-classpath", classpath,
                     "randoop.main.Main", "gentests",
                     '--classlist={}'.format(class_list_file),
                     "--timelimit={}".format(time_limit),
                     "--junit-reflection-allowed=false",
                     "--ignore-flaky-tests=true",
                     "--timeout=5",
                     "--silently-ignore-bad-class-names=true",
                     '--junit-output-dir={}'.format(test_src_dir)]

  if junit_after_path:
    randoop_command.append("--junit-after-all={}".format(junit_after_path))

  if output_limit and output_limit > 0:
    randoop_command.append('--outputlimit={}'.format(output_limit))

  common.run_cmd(randoop_command, args, 'randoop')
Example #51
def get_hdfs_list(path):
    return_dict = dict()
    cmd = "hadoop fs -ls -R %s" % path
    output = run_cmd(cmd)
    for line in output.split("\n"):
        fields = line.split()
        if len(fields) == 8:
            # fields[7] is the file path, fields[4] the size in bytes
            return_dict[fields[7]] = fields[4]
    return return_dict
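
Note that this variant lists the tree recursively with -ls -R, while the earlier get_hdfs_list on this page returns a single -du -s summary. Illustrative usage:

for filename, size in get_hdfs_list('/user/trafodion').items():
    print('%s\t%s bytes' % (filename, size))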
def main():
    with common.cd(WORKING_DIR):
        test_dtrace = "test.dtrace.gz"
        test_inv_name = "TestInvariant"
        ontology_to_daikon.create_daikon_invariant("README.md", test_inv_name)
        cmd = ["javac", "-classpath", daikon_jar + ":.", test_inv_name + ".java"]
        common.run_cmd(cmd, print_output=True)
        print ("Finding program points")
        ppts = find_ppts_that_establish_inv(test_dtrace, WORKING_DIR, test_inv_name)
        print ("deleting temp files")
        os.remove(test_inv_name + ".class")
        os.remove(test_inv_name + ".java")
        os.remove("test.inv.gz")
        # output = run_daikon_on_dtrace_file(test_dtrace, checked_invariant="daikon.inv.unary.sequence.EltwiseIntLessThan")
        # print output
        # ppts = find_ppts_that_establish_inv_in_daikon_output(output, " sorted by ")
        print ("Methods that establish FirstMuseInvariant:")
        for ppt in ppts:
            print ppt
def merge_aod(in_path="",
              out_path="./",
              input_file="AO2D.root",
              must_have="ctf",
              bunch_size=50,
              skip_already_existing=True):
    in_path = os.path.normpath(in_path)
    out_path = os.path.normpath(out_path)
    file_list = []
    for root, dirs, files in os.walk(in_path):
        for file in files:
            if file == input_file:
                to_merge = os.path.abspath(os.path.join(root, file))
                print(to_merge)
                if must_have is not None and must_have in to_merge:
                    file_list.append(to_merge)
    verbose_msg("Found", len(file_list), "files called", input_file)
    # Divide it in bunches
    file_list = [
        file_list[i:i + bunch_size]
        for i in range(0, len(file_list), bunch_size)
    ]
    for bunch_index, bunch in enumerate(file_list):
        bunch_size = 0
        with open("inputfile.txt", "w") as f:
            for j in bunch:
                f.write(f"{j}\n")
                bunch_size += os.path.getsize(j)
        out_aod = os.path.join(out_path, f"AO2D_{bunch_index}.root")
        verbose_msg("Merging bunch of", len(bunch), "files. I.e.",
                    bunch_size * 1e-6, "MB")
        if skip_already_existing and os.path.isfile(out_aod):
            verbose_msg(out_aod, "already existing, skipping")
            continue
        tmp_aod = os.path.join(out_path, "MergedAOD.root")
        run_cmd(
            f"o2-aod-merger --input inputfile.txt --output {tmp_aod} --skip-non-existing-files",
            comment=f"Merging AODs into {out_aod}")
        os.rename(tmp_aod, out_aod)
        merged_size = os.path.getsize(out_aod)
        msg("Produced a merged file of", merged_size * 1e-6, "MB from",
            bunch_size * 1e-6, "MB, compression:", merged_size / bunch_size)
Example #55
def _build_pluginval():
    root_path = os.getcwd()

    pluginval_build_path = os.path.join(pluginval_path, 'install')
    os.chdir(pluginval_build_path)
    root_path_rel = os.path.relpath(root_path)

    if platform_name == 'Darwin':
        plugin_build_bin_name = 'mac_build'
    elif platform_name == 'Windows':
        plugin_build_bin_name = 'windows_build.bat'
    elif platform_name == 'Linux':
        plugin_build_bin_name = 'linux_build'

    pluginval_build_bin_path = os.path.join(pluginval_build_path,
                                            plugin_build_bin_name)

    run_cmd(pluginval_build_bin_path)

    os.chdir(root_path_rel)
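
The two pluginval helpers on this page pair naturally; a hypothetical driver builds the validator once, then runs it over the freshly built plug-ins:

_build_pluginval()
_validate_plugins()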
Example #56
def set_config(config_file, config, value):
    config = config.strip()
    value = value.strip()
    config_string = f"{config} {value}"
    # Note the closing '/' that terminates the sed substitution.
    run_cmd("sed -i -e \""
            f"s/{config} .*$/{config_string}/"
            "\" " + config_file)
    # Checking that the file has the correct configuration
    with open(config_file) as f:
        has_it = False
        config_string = config_string.replace("\\", "").strip("/")
        for lineno, line in enumerate(f):
            if line.strip() == config_string:
                verbose_msg(f"Found config string '{config_string}'",
                            f"at line #{lineno} '{line.strip()}'")
                has_it = True
                break
        if not has_it:
            fatal_msg("Configuration file", config_file,
                      f"does not have config string '{config_string}'")
Example #57
def run_daikon_on_dtrace_file(dtrace_file,
                              classpath=daikon_jar,
                              checked_invariant=None):
    cmd = ["java", "-classpath", classpath, "daikon.DaikonSimple", dtrace_file]
    if checked_invariant:
        cmd += [
            "--disable-all-invariants", "--user-defined-invariant",
            checked_invariant
        ]
        cmd += ["--config_option", "daikon.Daikon.undo_opts=true"]
    return common.run_cmd(cmd, print_output=True)['output']
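
Usage mirroring the commented-out call in main() earlier on this page:

output = run_daikon_on_dtrace_file(
    "test.dtrace.gz",
    checked_invariant="daikon.inv.unary.sequence.EltwiseIntLessThan")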
Example #58
def run(args, javac_commands, jars):
    print(os.environ)
    idx = 0
    for jc in javac_commands:
        jaif_file = "logs/infer_result_{}.jaif".format(idx)
        cmd = get_tool_command(args, jc['javac_switches']['classpath'], jc['java_files'], jaif_file)
        status = common.run_cmd(cmd, args, 'infer')
        if args.crashExit and status['return_code'] != 0:
            print("----- CF Inference/Typecheck crashed! Terminating DLJC. -----")
            sys.exit(1)
        idx += 1