Example #1
def __executeSVN(self, command, arg="", split=False):
    """Run an svnlook subcommand against the current transaction or revision,
    caching the output so repeated queries do not spawn new processes."""
    command = "svnlook --%s %s %s %s %s" % (self.type, self.txnName, command, self.reposPath, arg)
    if command in self.cache:
        return self.cache[command]

    output = Process.execute(command)
    if split:
        # Return a list of non-empty, stripped lines instead of the raw text.
        output = [x.strip() for x in output.split("\n") if x.strip()]

    self.cache[command] = output
    return output
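This example (and Example #2 below) relies on a project-local Process helper module that the page does not show. A minimal sketch of what it presumably provides, based only on the calls above (Process.execute returning captured output, and Process.ProcessException carrying an output attribute); the implementation details are an assumption:

import subprocess

class ProcessException(Exception):
    # Assumed shape: carries the failed command's combined output
    # (the e.output used in Example #2 below).
    def __init__(self, command, output):
        Exception.__init__(self, "Command failed: %s" % command)
        self.command = command
        self.output = output

def execute(command):
    # Run the command in a shell, capture stdout and stderr together,
    # and raise ProcessException on a non-zero exit code.
    process = subprocess.Popen(command, shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    output = process.communicate()[0].decode("utf-8", errors="replace")
    if process.returncode != 0:
        raise ProcessException(command, output)
    return output

With such a helper, the method above could be called inside its class as, for example, self.__executeSVN("changed", split=True) to get the list of paths touched by the current transaction.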
Example #2
def run(transaction, config):
    # Select the transaction's files that match the check patterns.
    check = config.getArray("Checkstyle.CheckFiles", [r".*\.java"])
    ignore = config.getArray("Checkstyle.IgnoreFiles", [])
    files = transaction.getFiles(check, ignore)

    java = config.getString("Checkstyle.Java")
    classpath = config.getString("Checkstyle.Classpath")
    configFile = config.getString("Checkstyle.ConfigFile")

    command = "%s -classpath %s com.puppycrawl.tools.checkstyle.Main -c %s " % (java, classpath, configFile)

    # Only check files that were added or updated in this transaction.
    files = [transaction.getFile(name) for name, state in files.items() if state in ["A", "U", "UU"]]

    try:
        Process.execute(command + " ".join(files))
    except Process.ProcessException as e:
        msg = "Coding style errors found:\n\n"
        msg += e.output + "\n"
        msg += "See the Checkstyle documentation for a detailed description: http://checkstyle.sourceforge.net/"
        return (msg, 1)
    # Success: no message, zero exit code (mirrors the error tuple above).
    return ("", 0)
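The Checkstyle.* keys read above come from the hook's configuration. A hypothetical fragment showing the expected entries (the key names are taken from the code; the values and the key=value syntax are invented for illustration, since the config format itself is not shown):

Checkstyle.Java=/usr/bin/java
Checkstyle.Classpath=/opt/checkstyle/checkstyle-all.jar
Checkstyle.ConfigFile=/opt/checkstyle/sun_checks.xml
Checkstyle.CheckFiles=.*\.java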
Example #3

        # Fragment; assumes "from multiprocessing import Process" and the
        # surrounding argparse setup (args, input_dir, results_dir, etc.).
        input_sets = SetsGenerator(
            input_dir=input_dir,
            lang=args.lang,
            monolingual_only=args.monolingual,
            cognate_decimal=args.cognate_decimal_fraction,
            lexicon_csv=args.lexicon,
            structures_csv=args.structures,
            allow_free_structure_production=args.free_pos)
        # joblib proved unreliable to install on Ubuntu 16.04.6 LTS, so plain
        # multiprocessing is used instead of the equivalent one-liner below.
        # (With prefer="threads" the input sets would need to be deep-copied;
        # n_jobs=-1 means all CPUs are used.)
        # Parallel(n_jobs=-1)(delayed(create_input_for_simulation)(sim) for sim in simulation_range)
        parallel_jobs = []
        for sim in simulation_range:  # first create all input files
            parallel_jobs.append(
                Process(target=create_input_for_simulation,
                        args=(results_dir, input_sets, cognate_experiment,
                              training_num, num_test, l2_decimal,
                              auxiliary_experiment, sim, args.randomize)))
            parallel_jobs[-1].start()
            # Once a full batch of processes is running (one per CPU) or this
            # is the last simulation, wait for the whole batch to finish.
            if len(parallel_jobs) == available_cpu or sim == simulation_range[-1]:
                for p in parallel_jobs:
                    p.join()
                parallel_jobs = []

        del input_sets  # we no longer need it

    if not args.decrease_lrate or args.continue_training:  # assumption: when training continues, the lrate is NOT reduced
        logging.info(f"Learning rate will NOT be decreased; it is set to {args.final_lrate}")
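The manual batch-and-join loop above caps concurrency at available_cpu processes but waits for the slowest process in each batch before starting the next one. A minimal sketch of the same idea with multiprocessing.Pool, which keeps all workers busy continuously (this is an alternative sketch using the names from the fragment above, not the author's code):

from multiprocessing import Pool

# One argument tuple per simulation; each worker receives a pickled
# copy of input_sets.
job_args = [(results_dir, input_sets, cognate_experiment, training_num,
             num_test, l2_decimal, auxiliary_experiment, sim, args.randomize)
            for sim in simulation_range]
with Pool(processes=available_cpu) as pool:
    # Pool schedules at most available_cpu workers at a time, replacing
    # the manual start/join batching in the loop above.
    pool.starmap(create_input_for_simulation, job_args)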