def main(arguments):
    """
    Takes the return value of the `commandlineArguments()` function as input and
    trains/tests the model on manipulating sequences of text.
    """
    tasks = makeTasks()
    eprint("Generated", len(tasks), "tasks")
    # No task is forced into training; the split below decides membership.
    for task in tasks:
        task.mustTrain = False
    test, train = testTrainSplit(tasks, 1.)
    eprint("Split tasks into %d/%d test/train" % (len(test), len(train)))

    latest = arguments.pop("latest")
    # Second return value is the "cheating" variant of the challenge tasks;
    # it is unused here.
    challenge, challengeCheating = loadPBETasks(
        "data/sygus" if latest else "PBE_Strings_Track")
    eprint("Got %d challenge PBE tasks" % len(challenge))

    if arguments.pop('trainChallenge'):
        # Fold half of the challenge problems into training and hold out the rest.
        challengeTest, challengeTrain = testTrainSplit(challenge, 0.5)
        challenge = challengeTest
        train += challengeTrain
        eprint(
            "Incorporating %d (50%%) challenge problems into the training set." %
            (len(challengeTrain)),
            "We will evaluate on the held out challenge problems.",
            "This makes a total of %d training problems." % len(train))

    if arguments.pop('onlyChallenge'):
        train = challenge
        test = []
        challenge = []
        eprint("Training only on sygus problems.")

    # Gather every distinct string constant appearing in any task and hand them
    # to the constant-instantiation pass.
    constantStrings = {tuple(s)
                       for task in test + train + challenge
                       for s in task.stringConstants}
    ConstantInstantiateVisitor.SINGLE = ConstantInstantiateVisitor(
        [list(cs) for cs in constantStrings])

    haveLength = not arguments.pop("noLength")
    haveMap = not arguments.pop("noMap")
    haveUnfold = not arguments.pop("noUnfold")
    eprint(f"Including map as a primitive? {haveMap}")
    eprint(f"Including length as a primitive? {haveLength}")
    eprint(f"Including unfold as a primitive? {haveUnfold}")
    # Drop any bootstrap primitive disabled on the command line.
    excluded = {name
                for name, keep in [("map", haveMap),
                                   ("unfold", haveUnfold),
                                   ("length", haveLength)]
                if not keep}
    baseGrammar = Grammar.uniform(
        primitives + [p for p in bootstrapTarget() if p.name not in excluded])
    challengeGrammar = baseGrammar  # Grammar.uniform(targetTextPrimitives)

    evaluationTimeout = 0.0005
    # We will spend 10 minutes on each challenge problem
    challengeTimeout = 10 * 60

    for task in train + test + challenge:
        task.maxParameters = 2

    if arguments.pop("showTasks"):
        # NOTE(review): the "train" entry prints `tasks` (the full task set),
        # not `train` — presumably intentional, but worth confirming.
        for source, ts in [("train", tasks), ("test", test),
                           ("challenge", challenge)]:
            print(source, "tasks:")
            for t in ts:
                print(t.name)
                for xs, y in t.examples:
                    xs = ['"' + "".join(x) + '"' for x in xs]
                    y = "".join(y) if isinstance(y, list) else y
                    print('f(%s) = "%s"' % (", ".join(xs), y))
                print("\t{%s}" % (t.stringConstants))
            print()
        sys.exit(0)

    competitionCheckpoints = arguments.pop("compete")
    if competitionCheckpoints:
        # Evaluate previously trained checkpoints on the challenge problems
        # instead of training anything new.
        checkpoints = []
        for competitionCheckpoint in competitionCheckpoints:
            with open(competitionCheckpoint, 'rb') as handle:
                checkpoints.append(dill.load(handle))
        sygusCompetition(checkpoints, challenge)
        sys.exit(0)

    timestamp = datetime.datetime.now().isoformat()
    outputDirectory = "experimentOutputs/text/%s" % timestamp
    os.system("mkdir -p %s" % outputDirectory)

    generator = ecIterator(baseGrammar, train,
                           testingTasks=test + challenge,
                           outputPrefix="%s/text" % outputDirectory,
                           evaluationTimeout=evaluationTimeout,
                           **arguments)
    # Drive the iterator to completion; results are written to outputPrefix.
    for result in generator:
        pass
def main(args):
    """
    Takes the return value of the `commandlineArguments()` function as input and
    trains/tests the model on LOGO tasks.

    Pops all LOGO-specific options off `args` (mutating it) before forwarding
    the remainder as keyword arguments to `ecIterator`.
    """
    # The below legacy global statement is required since prefix_dreams is used by LogoFeatureCNN.
    # TODO(lcary): use argument passing instead of global variables.
    global prefix_dreams
    # The below global statement is required since primitives is modified within main().
    # TODO(lcary): use a function call to retrieve and declare primitives instead.
    global primitives

    visualizeCheckpoint = args.pop("visualize")
    if visualizeCheckpoint is not None:
        # Visualize the primitives of a checkpoint's final grammar, then quit.
        with open(visualizeCheckpoint, 'rb') as handle:
            primitives = pickle.load(handle).grammars[-1].primitives
        visualizePrimitives(primitives)
        sys.exit(0)

    dreamCheckpoint = args.pop("dreamCheckpoint")
    dreamDirectory = args.pop("dreamDirectory")

    proto = args.pop("proto")

    if dreamCheckpoint is not None:
        # outputDreams(dreamCheckpoint, dreamDirectory)
        enumerateDreams(dreamCheckpoint, dreamDirectory)
        sys.exit(0)

    animateCheckpoint = args.pop("animate")
    if animateCheckpoint is not None:
        animateSolutions(loadPickle(animateCheckpoint).allFrontiers)
        sys.exit(0)

    target = args.pop("target")
    red = args.pop("reduce")
    save = args.pop("save")
    prefix = args.pop("prefix")
    prefix_dreams = prefix + "/dreams/" + ('_'.join(target)) + "/"
    # NOTE(review): prefix_pickles is computed but never read in this function;
    # confirm nothing else depends on the side effect before removing it.
    prefix_pickles = prefix + "/logo." + ('.'.join(target))
    if not os.path.exists(prefix_dreams):
        os.makedirs(prefix_dreams)

    tasks = makeTasks(target, proto)
    eprint("Generated", len(tasks), "tasks")

    costMatters = args.pop("cost")
    for t in tasks:
        t.specialTask[1]["costMatters"] = costMatters
        # disgusting hack - include whether cost matters in the dummy input
        if costMatters:
            t.examples = [([1], t.examples[0][1])]

    # Launch the prototypical-network feature server and give it time to boot.
    os.chdir("prototypical-networks")
    subprocess.Popen(["python", "./protonet_server.py"])
    time.sleep(3)
    os.chdir("..")

    test, train = testTrainSplit(tasks, args.pop("split"))
    eprint("Split tasks into %d/%d test/train" % (len(test), len(train)))
    try:
        if test:
            montageTasks(test, "test_")
        montageTasks(train, "train_")
    except Exception:
        # Best-effort visualization only; never abort training over it.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        eprint(
            "WARNING: couldn't generate montage. Do you have an old version of scipy?"
        )

    # BUG FIX: the original guard was `if red is not []:` — an identity
    # comparison against a fresh list literal, which is always True (and would
    # then crash iterating a None `red`). A truthiness check is the intent.
    if red:
        for reducing in red:
            try:
                with open(reducing, 'r') as f:
                    prods = json.load(f)
                    for e in prods:
                        e = Program.parse(e)
                        if e.isInvented:
                            primitives.append(e)
            # The three original handlers printed the same message; merged.
            except (EOFError, IOError, json.decoder.JSONDecodeError):
                eprint("Couldn't grab frontier from " + reducing)

    # Deduplicate primitives while preserving their order.
    primitives = list(OrderedDict((x, True) for x in primitives).keys())
    baseGrammar = Grammar.uniform(primitives, continuationType=turtle)

    eprint(baseGrammar)

    timestamp = datetime.datetime.now().isoformat()
    outputDirectory = "experimentOutputs/logo/%s" % timestamp
    os.system("mkdir -p %s" % outputDirectory)

    generator = ecIterator(baseGrammar, train,
                           testingTasks=test,
                           outputPrefix="%s/logo" % outputDirectory,
                           evaluationTimeout=0.01,
                           **args)

    r = None
    for result in generator:
        # After each wake/sleep iteration, dream from the newest grammar.
        iteration = len(result.learningCurve)
        dreamDirectory = "%s/dreams_%d" % (outputDirectory, iteration)
        os.system("mkdir -p %s" % dreamDirectory)
        eprint("Dreaming into directory", dreamDirectory)
        dreamFromGrammar(result.grammars[-1], dreamDirectory)
        r = result

    # BUG FIX: needsExport was previously computed unconditionally after the
    # loop, raising AttributeError on `r` if the generator yielded nothing —
    # even when `save` was None. Only export when there is a result and a path.
    if r is not None and save is not None:
        needsExport = [str(z)
                       for _, _, z in r.grammars[-1].productions
                       if z.isInvented]
        with open(save, 'w') as f:
            json.dump(needsExport, f)