def translate_constraints(triple):
    """Translate one textual constraint into its SMT2 representation.

    Worker function for the multiprocessing pool: z3 expressions cannot be
    pickled across processes, so the translated formula is serialized to an
    SMT2 string here and parsed back into a z3 expression by the parent.

    triple -- (constraint_text, json_data, features_as_boolean) tuple.
    Returns (smt2_string, feature_set).
    Exits the whole program on a translation failure.
    """
    constraint, json_data, bool_features = triple
    try:
        translated = SpecTranslator.translate_constraint(
            constraint, json_data, bool_features)
    except Exception as err:
        log.critical("Parsing failed while processing " + constraint + ": " + str(err))
        log.critical("Exiting")
        sys.exit(1)
    return toSMT2(translated["formula"]), translated["features"]
def main(argv):
    """Main procedure.

    Parses the command line, loads the JSON problem description and
    dispatches to one of the supported modalities: validate, explain,
    check-interface or (by default) reconfigure.
    """
    output_file = ""
    modality = ""  # default modality is to proceed with the reconfiguration
    interface_file = ""
    # Parse the command line options.
    try:
        opts, args = getopt.getopt(argv, "ho:vk", ["help", "ofile=", "verbose", "keep", "validate", "explain", "check-interface="])
    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ('-h', "--help"):
            usage()
            sys.exit()
        elif opt in ("-o", "--ofile"):
            output_file = arg
        elif opt in ("-k", "--keep"):
            # KEEP is a module-level flag (presumably to keep temp files) —
            # defined outside this view.
            global KEEP
            KEEP = True
        elif opt == "--validate":
            modality = "validate"
        elif opt == "--explain":
            modality = "explain"
        elif opt == "--check-interface":
            modality = "check-interface"
            interface_file = os.path.abspath(arg)
        elif opt in ("-v", "--verbose"):
            log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
            log.info("Verbose output.")
    if len(args) != 1:
        print "one arguments is required"
        usage()
        sys.exit(1)
    input_file = os.path.abspath(args[0])
    # Output goes to stdout unless -o/--ofile was given.
    out_stream = sys.stdout
    if output_file:
        out_stream = open(output_file, "w")
    features = set()
    initial_features = set()
    contexts = {}
    attributes = {}
    constraints = []
    preferences = []
    contexts_constraints = []
    log.info("Reading input file")
    data = read_json(input_file)
    log.info("Processing attributes")
    # Ids arrive wrapped as "attribute[<name>]" / "feature[<name>]" /
    # "context[<name>]"; the regexes strip the wrappers.
    for i in data["attributes"]:
        id = re.match("attribute\[(.*)\]", i["id"]).group(1)
        attributes[id] = {}
        attributes[id]["min"] = i["min"]
        attributes[id]["max"] = i["max"]
        attributes[id]["feature"] = re.match("feature\[(.*)\]", i["featureId"]).group(1)
    for i in data["configuration"]["attribute_values"]:
        id = re.match("attribute\[(.*)\]", i["id"]).group(1)
        attributes[id]["initial"] = i["value"]
    log.debug(unicode(attributes))
    log.info("Processing contexts")
    for i in data["contexts"]:
        id = re.match("context\[(.*)\]", i["id"]).group(1)
        contexts[id] = {}
        contexts[id]["min"] = i["min"]
        contexts[id]["max"] = i["max"]
    for i in data["configuration"]["context_values"]:
        id = re.match("context\[(.*)\]", i["id"]).group(1)
        contexts[id]["initial"] = i["value"]
    log.debug(unicode(contexts))
    log.info("Processing initial features")
    for i in data["configuration"]["selectedFeatures"]:
        initial_features.add(re.match("feature\[(.*)\]", i).group(1))
    log.debug(unicode(initial_features))
    log.info("Processing Constraints")
    # Translate every textual constraint; a parse failure aborts the run.
    for i in data["constraints"]:
        try:
            d = SpecTranslator.translate_constraint(i, data)
            log.debug("Find constrataint " + unicode(d))
            constraints.append(d["formula"])
            features.update(d["features"])
        except Exception as e:
            log.critical("Parsing failed while processing " + i + ": " + str(e))
            log.critical("Exiting")
            sys.exit(1)
    log.info("Processing Preferences")
    for i in data["preferences"]:
        try:
            d = SpecTranslator.translate_preference(i, data)
            log.debug("Find preference " + unicode(d))
            preferences.append(d["formula"])
        except Exception as e:
            log.critical("Parsing failed while processing " + i + ": " + str(e))
            log.critical("Exiting")
            sys.exit(1)
    log.info("Processing Context Constraints")
    # Context constraints are optional in the input JSON.
    if "context_constraints" in data:
        for i in data["context_constraints"]:
            try:
                d = SpecTranslator.translate_constraint(i, data)
                log.debug("Find context constraint " + unicode(d))
                contexts_constraints.append(d["formula"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    # Dispatch on the selected modality.
    if modality == "validate":
        validate(features, initial_features, contexts, attributes, constraints, preferences, contexts_constraints, out_stream)
    elif modality == "explain":
        explain(features, initial_features, contexts, attributes, constraints, preferences, data, out_stream)
    elif modality == "check-interface":
        check_interface(features, contexts, attributes, constraints, contexts_constraints, read_json(interface_file), out_stream)
    else:
        reconfigure(features, initial_features, contexts, attributes, constraints, preferences, out_stream)
    log.info("Program Succesfully Ended")
def check_interface(features, contexts, attributes, constraints, contexts_constraints, interface, out_stream):
    """Check if the interface given is a proper interface.

    An interface is valid when (1) each of its attributes exists in the
    feature model (FM) with a range contained in the interface range,
    (2) its contexts match the FM contexts, (3) its context constraints do
    not admit contexts forbidden by the FM context constraints, and
    (4) every assignment of the interface variables can be completed into
    a full FM configuration (checked with a z3 ForAll query).
    Writes a one-line JSON verdict on out_stream; exits on parse errors.
    """
    # handle FM contexts_constraints
    i_features = set()
    i_contexts = {}
    i_attributes = {}
    i_constraints = []
    i_contexts_constraints = []
    log.info("Processing interface attributes")
    for i in interface["attributes"]:
        id = re.match("attribute\[(.*)\]", i["id"]).group(1)
        i_attributes[id] = {}
        i_attributes[id]["min"] = i["min"]
        i_attributes[id]["max"] = i["max"]
        i_attributes[id]["feature"] = re.match("feature\[(.*)\]", i["featureId"]).group(1)
        # The FM attribute range must be contained in the interface range.
        if (id not in attributes) or \
                (attributes[id]["min"] < i_attributes[id]["min"]) or \
                (attributes[id]["max"] > i_attributes[id]["max"]):
            # FIX: added the missing space before "does not match".
            json.dump({"result": "not_valid: attribute " + id + " does not match"}, out_stream)
            out_stream.write("\n")
            return None
    log.debug(unicode(attributes))
    log.info("Processing contexts")
    for i in interface["contexts"]:
        id = re.match("context\[(.*)\]", i["id"]).group(1)
        i_contexts[id] = {}
        i_contexts[id]["min"] = i["min"]
        i_contexts[id]["max"] = i["max"]
        # FIX: the original compared with "==", which rejected interfaces
        # whose context bounds DID match. "!=" rejects the ones that
        # differ, as the "does not match" message states.
        if (id not in contexts) or \
                (contexts[id]["min"] != i_contexts[id]["min"]) or \
                (contexts[id]["max"] != i_contexts[id]["max"]):
            json.dump({"result": "not_valid: context " + id + " does not match"}, out_stream)
            out_stream.write("\n")
            return None
    log.debug(unicode(contexts))
    log.info("Processing Constraints")
    for i in interface["constraints"]:
        try:
            d = SpecTranslator.translate_constraint(i, interface)
            log.debug("Find constrataint " + unicode(d))
            i_constraints.append(d["formula"])
            i_features.update(d["features"])
        except Exception as e:
            log.critical("Parsing failed while processing " + i + ": " + str(e))
            log.critical("Exiting")
            sys.exit(1)
    log.info("Processing Context Constraints")
    if "context_constraints" in interface:
        for i in interface["context_constraints"]:
            try:
                d = SpecTranslator.translate_constraint(i, interface)
                log.debug("Find context constraint " + unicode(d))
                i_contexts_constraints.append(d["formula"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    log.info("Checking Context Constraints Extensibility")
    # If some context allowed by the interface constraints violates the FM
    # context constraints, the interface is not valid.
    solver = z3.Solver()
    for i in contexts.keys():
        solver.add(contexts[i]["min"] <= z3.Int(i))
        solver.add(z3.Int(i) <= contexts[i]["max"])
    solver.add(z3.And(i_contexts_constraints))
    solver.add(z3.Not(z3.And(contexts_constraints)))
    result = solver.check()
    if result == z3.sat:
        model = solver.model()
        out = {"result": "not_valid: context extensibility problem", "contexts": []}
        for i in contexts.keys():
            out["contexts"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        json.dump(out, out_stream)
        out_stream.write("\n")
        # FIX: stop here as the other failure branches do; previously the
        # check fell through and could emit a second JSON verdict.
        return None
    solver = z3.Solver()
    log.info("Add interface variables")
    # Features are encoded as 0/1 integers.
    for i in i_features:
        solver.add(0 <= z3.Int(i), z3.Int(i) <= 1)
    for i in i_attributes.keys():
        solver.add(i_attributes[i]["min"] <= z3.Int(i), z3.Int(i) <= i_attributes[i]["max"])
    for i in i_contexts.keys():
        solver.add(i_contexts[i]["min"] <= z3.Int(i), z3.Int(i) <= i_contexts[i]["max"])
    log.info("Add interface contexts constraints")
    solver.add(z3.And(i_contexts_constraints))
    solver.add(z3.And(contexts_constraints))
    log.info("Add interface constraints")
    for i in i_constraints:
        solver.add(i)
    log.info("Add FM context variables")
    for i in contexts.keys():
        if i not in i_contexts:
            solver.add(contexts[i]["min"] <= z3.Int(i))
            solver.add(z3.Int(i) <= contexts[i]["max"])
    log.info("Building the FM formula")
    formulas = []
    for i in features:
        if i not in i_features:
            formulas.append(0 <= z3.Int(i))
            formulas.append(z3.Int(i) <= 1)
    for i in attributes.keys():
        if i not in i_attributes:
            formulas.append(attributes[i]["min"] <= z3.Int(i))
            formulas.append(z3.Int(i) <= attributes[i]["max"])
    for i in constraints:
        formulas.append(i)
    log.info("Add forall fatures and attributes not formula")
    # sat here means: there is an interface assignment for which NO
    # completion of the remaining FM variables satisfies the FM.
    solver.add(z3.ForAll(
        [z3.Int(i) for i in features if i not in i_features] +
        [z3.Int(i) for i in attributes.keys() if i not in i_attributes.keys()],
        z3.Not(z3.And(formulas))))
    log.debug(solver)
    log.info("Computing")
    result = solver.check()
    log.info("Printing output")
    if result == z3.sat:
        model = solver.model()
        out = {"result": "not_valid", "contexts": [], "attributes": [], "features": []}
        for i in contexts.keys():
            out["contexts"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        for i in i_features:
            out["features"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        for i in i_attributes.keys():
            out["attributes"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        json.dump(out, out_stream)
        out_stream.write("\n")
    else:
        out_stream.write('{"result":"valid"}\n')
def main(argv):
    """Main procedure ...

    Parses the options, generates the Aeolus universe / locations /
    specification files, runs the external Zephyrus tool and the bindings
    optimizer, and emits the final JSON configuration.
    """
    output_file = ""
    dot_file = ""
    try:
        opts, args = getopt.getopt(argv,"ho:vd:",["help","ofile=","verbose","dot="])
    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt == '-h':
            usage()
            sys.exit()
        elif opt in ("-o", "--ofile"):
            output_file = arg
        elif opt in ("-d", "--dot"):
            dot_file = arg
        elif opt in ("-v", "--verbose"):
            log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
            log.info("Verbose output.")
    if len(args) != 2:
        print "2 arguments are required"
        usage()
        sys.exit(1)
    input_file = args[0]
    target = args[1]
    if input_file == "" or target == "":
        print "Input file not given. Please use -i, -d, -t options"
        usage()
        sys.exit(1)
    input_file = os.path.abspath(input_file)
    # Temp files are tagged with the process group id to avoid clashes
    # between concurrent runs.
    pid = str(os.getpgid(0))
    aeolus_universe = "/tmp/" + pid + "_universe.json"
    spec_file = "/tmp/" + pid + "_spec.spec"
    zephyrus_output = "/tmp/" + pid + "_zephyrus.json"
    zephyrus_output_opt = "/tmp/" + pid + "_zephyrus_opt.json"
    locations_file = "/tmp/" + pid + "_locations.json"
    script_directory = os.path.dirname(os.path.realpath(__file__))
    log.info("Parsing JSON file")
    data = read_json(input_file)
    log.debug("Internal json representation")
    log.debug(json.dumps(data, indent=1))
    log.info("Getting locations")
    resouce_names = process_location_file(data, locations_file)
    log.info("Generating universe file")
    service_names = generate_universe(data, aeolus_universe)
    log.info("Processing specification")
    # Translate the target specification into Zephyrus's input language.
    try:
        spec = Spec.translate_specification(target, resouce_names, service_names)
    except Spec.SpecificationParsingException as e:
        log.critical("Parsing of the specification failed: " + e.value)
        log.critical("Exiting")
        sys.exit(1)
    log.debug("Zephyrus specification:")
    log.debug(spec)
    with open(spec_file, 'w') as f:
        f.write(spec)
    log.debug("---UNIVERSE---")
    log.debug(json.dumps(read_json(aeolus_universe),indent=1))
    log.info("Running Zephyrus")
    # Invoke Zephyrus; the -d variant additionally asks for a graphviz
    # deployment graph.
    if dot_file == "":
        proc = Popen(
            [settings.ZEPHYRUS_COMMAND, "-u", aeolus_universe,
             "-ic", locations_file,
             "-spec", spec_file,
             "-out", "stateful-json-v1", zephyrus_output,
             "-settings", script_directory + "/zephyrus.settings"],
            cwd=script_directory, stdout=PIPE, stderr=PIPE)
    else:
        proc = Popen(
            [settings.ZEPHYRUS_COMMAND, "-u", aeolus_universe,
             "-ic", locations_file,
             "-spec", spec_file,
             "-out", "stateful-json-v1", zephyrus_output,
             "-out", "graph-deployment", dot_file,
             "-settings", script_directory + "/zephyrus.settings"],
            cwd=script_directory, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    log.debug("Zephyrus stdout")
    log.debug(out)
    log.debug("Zephyrus stderr")
    log.debug(err)
    # Return code 14 is treated as "no solution exists" — presumably a
    # Zephyrus convention; verify against the Zephyrus docs.
    if proc.returncode == 14:
        log.critical("Zephyrus execution terminated with return code " + str(proc.returncode))
        log.critical("Specification does not admit solutions")
        sys.exit(1)
    if proc.returncode != 0:
        log.critical("Zephyrus execution terminated with return code " + str(proc.returncode))
        log.critical("Exiting")
        sys.exit(1)
    log.debug("---FINAL CONFIGURATION---")
    log.debug(json.dumps(read_json(zephyrus_output),indent=1))
    log.debug("---RUN BINDINGS OPTIMIZER---")
    # Post-process the Zephyrus output with the bindings optimizer script.
    proc = Popen(
        ["python", "bindings_opt.py", "-i", zephyrus_output, "-o", zephyrus_output_opt],
        cwd=script_directory, stdout=DEVNULL)
    proc.wait()
    if proc.returncode != 0:
        log.critical("Bindings optimizer terminated with return code " + str(proc.returncode))
        log.critical("Exiting")
        sys.exit(1)
    log.debug(json.dumps(read_json(zephyrus_output_opt),indent=1))
    log.info("Generate JSON output")
    if output_file == "":
        generate_output(data, read_json(zephyrus_output_opt), sys.stdout)
    else:
        log.info("Writing to " + output_file)
        output_stream = open(output_file, 'w')
        generate_output(data, read_json(zephyrus_output_opt), output_stream)
        output_stream.close()
    log.info("Removing temp files")
    os.remove(aeolus_universe)
    os.remove(zephyrus_output)
    os.remove(spec_file)
    os.remove(locations_file)
    os.remove(zephyrus_output_opt)
    log.info("Program Succesfully Ended")
def main(input_file, num_of_process, output_file, keep, verbose, validate, validate_modality, explain, check_interface, features_as_boolean, check_features, check_features_modality, timeout, constraints_minimization, non_incremental_solver, no_default_preferences):
    """ INPUT_FILE Json input file """
    # Entry point (click-style): parse the flags, load the JSON input,
    # translate constraints/preferences into z3 formulas and dispatch to
    # the backend selected by the modality.
    start_time = datetime.datetime.now()
    modality = "reconfigure"  # default modality is to proceed with the reconfiguration
    interface_file = ""
    # only one modality can be active
    if sum([validate, explain, check_features, (len(check_interface) > 0)]) > 1:
        log.critical("Only one flag among validate, explain, check-interface, and check-feature can be selected.")
        sys.exit(1)
    if check_interface and features_as_boolean:
        log.critical("Features check-interface and features-as-boolean are incompatible, only one can be selected.")
        sys.exit(-1)
    if validate:
        modality = "validate"
    if explain:
        modality = "explain"
    if check_interface:
        modality = "check-interface"
        interface_file = check_interface
    if check_features:
        modality = "check-features"
    # Map the verbosity count to a logging level (default: errors only).
    log_level = log.ERROR
    if verbose == 1:
        log_level = log.WARNING
    elif verbose == 2:
        log_level = log.INFO
    elif verbose >= 3:
        log_level = log.DEBUG
    log.basicConfig(format="[%(asctime)s][%(levelname)s][%(name)s]%(message)s", level=log_level)
    log.info("Verbose Level: " + unicode(verbose))
    if verbose:
        # NOTE(review): this second basicConfig is a no-op because logging
        # was already configured above; kept so the logged output is
        # unchanged.
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
        log.info("Verbose output.")
    if keep:
        global KEEP
        KEEP = True
    out_stream = sys.stdout
    if output_file:
        out_stream = open(output_file, "w")
    features = set()
    initial_features = set()
    contexts = {}
    attributes = {}
    constraints = []
    preferences = []
    contexts_constraints = []
    log.info("Reading input file")
    data = read_json(input_file)
    # if no optional feature are given the default is that there are none specified
    if not "optional_features" in data:
        data["optional_features"] = {}
    log.info("Processing attributes")
    for i in data["attributes"]:
        id = re.match("attribute\[(.*)\]", i["id"]).group(1)
        attributes[id] = {}
        attributes[id]["min"] = i["min"]
        attributes[id]["max"] = i["max"]
        attributes[id]["feature"] = re.match("feature\[(.*)\]", i["featureId"]).group(1)
    if data["attributes"]:
        for i in data["configuration"]["attribute_values"]:
            id = re.match("attribute\[(.*)\]", i["id"]).group(1)
            attributes[id]["initial"] = i["value"]
    log.debug(unicode(attributes))
    log.info("Processing contexts")
    for i in data["contexts"]:
        id = re.match("context\[(.*)\]", i["id"]).group(1)
        contexts[id] = {}
        contexts[id]["min"] = i["min"]
        contexts[id]["max"] = i["max"]
    if data["contexts"]:
        for i in data["configuration"]["context_values"]:
            id = re.match("context\[(.*)\]", i["id"]).group(1)
            contexts[id]["initial"] = i["value"]
    log.debug(unicode(contexts))
    log.info("Processing initial features, if any")
    if "selectedFeatures" in data["configuration"]:
        for i in data["configuration"]["selectedFeatures"]:
            initial_features.add(re.match("feature\[(.*)\]", i).group(1))
    log.debug(unicode(initial_features))
    log.info("Processing Constraints")
    if num_of_process > 1:
        # convert in parallel formulas into smt and then parse it here
        # threads can not be used here because antlr parser seems not thread safe
        # the z3 expression can not be serialized
        log.debug("Starting to convert the constraints into smt representation")
        log.debug("Constraint to convert: " + unicode(len(data["constraints"])))
        pool = multiprocessing.Pool(num_of_process)
        results = pool.map(translate_constraints, [(x, data, features_as_boolean) for x in data["constraints"]])
        log.debug("Converting smt into z3 expressions")
        for smt_f, fs in results:
            constraints.append(z3.parse_smt2_string(smt_f))
            features.update(fs)
    else:
        for i in data["constraints"]:
            try:
                d = SpecTranslator.translate_constraint(i, data, features_as_boolean)
                log.debug("Find constrataint " + unicode(d))
                constraints.append(d["formula"])
                features.update(d["features"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    log.info("Constraint processed so far: {}".format(len(constraints)))
    # possibility for reconfigure and explain modality to add directly SMT formulas
    if "smt_constraints" in data:
        log.info("Processing special input constraint modality")
        features.update(data["smt_constraints"]["features"])
        for i in data["smt_constraints"]["formulas"]:
            constraints.append(z3.parse_smt2_string(i))
            # for explain purposes add smt_constraint to constraints
            data["constraints"].append(i)
        # FIX: was len("constraints"), which logged the length of the
        # literal string (always 11) instead of the number of constraints.
        log.info("Constraint processed so far: {}".format(len(constraints)))
    if modality == "reconfigure":
        # SMT formulas direct encoding also for preferences
        # these preferences have the highest priority
        # here we assume that the features are already declared
        if "smt_preferences" in data:
            log.info("Processing special input preferences modality. Pref added as higher priority.")
            for i in data["smt_preferences"]:
                preferences.append(z3.parse_smt2_string(i))
        log.info("Processing Preferences")
        for i in data["preferences"]:
            try:
                d = SpecTranslator.translate_preference(i, data, features_as_boolean)
                log.debug("Find preference " + unicode(d))
                preferences.append(d["formula"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    log.info("Processing Context Constraints")
    if "context_constraints" in data:
        for i in data["context_constraints"]:
            try:
                d = SpecTranslator.translate_constraint(i, data, features_as_boolean)
                log.debug("Find context constraint " + unicode(d))
                contexts_constraints.append(d["formula"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    start_running_time = datetime.datetime.now()
    # Dispatch to the backend for the selected modality.
    if modality == "validate":
        import validate_module
        if validate_modality == "grid":
            validate_module.run_validate_grid_search(
                features, initial_features, contexts, attributes, constraints,
                preferences, contexts_constraints, features_as_boolean,
                non_incremental_solver, out_stream)
        elif validate_modality == "forall":
            validate_module.run_validate(
                features, initial_features, contexts, attributes, constraints,
                preferences, contexts_constraints, features_as_boolean,
                out_stream)
    elif modality == "explain":
        run_explain(features, contexts, attributes, constraints, data,
                    features_as_boolean, constraints_minimization, out_stream)
    elif modality == "check-interface":
        run_check_interface(features, contexts, attributes, constraints,
                            contexts_constraints, read_json(interface_file),
                            features_as_boolean, out_stream)
    elif modality == "check-features":
        import check_features_module
        if check_features_modality == "grid":
            check_features_module.run_feature_analysis_grid_search(
                features, features_as_boolean, contexts, attributes,
                constraints, data["optional_features"], non_incremental_solver,
                out_stream,
                "" if "time_context" not in data else data["time_context"])
        elif check_features_modality == "forall":
            check_features_module.run_feature_analysis_forall(
                features, features_as_boolean, contexts, attributes,
                constraints, data["optional_features"], non_incremental_solver,
                out_stream,
                "" if "time_context" not in data else data["time_context"])
        elif check_features_modality == "pruning":
            check_features_module.run_feature_analysis_with_optimization(
                features, features_as_boolean, contexts, attributes,
                constraints, data["optional_features"], non_incremental_solver,
                out_stream,
                "" if "time_context" not in data else data["time_context"])
    elif modality == "reconfigure":
        run_reconfigure(features, initial_features, contexts, attributes,
                        constraints, preferences, features_as_boolean, timeout,
                        no_default_preferences, out_stream)
    else:
        log.critical("No modality matched. Exiting.")
        sys.exit(1)
    delta = datetime.datetime.now() - start_running_time
    log.info("Seconds taken to run the backend {}".format(delta.total_seconds()))
    delta = datetime.datetime.now() - start_time
    log.info("Seconds taken to run hyvarrec {}".format(delta.total_seconds()))
    log.info("Program Succesfully Ended")
def run_check_interface(features, contexts, attributes, constraints, contexts_constraints, interface, features_as_boolean, out_stream):
    """Check if the interface given is a proper interface.

    Same check as the legacy check_interface, extended with the
    features_as_boolean encoding (features as z3 Bool instead of 0/1 Int).
    Writes a one-line JSON verdict on out_stream; exits on parse errors.
    """
    # todo possibility of using interface where features are given as boolean and not int
    # handle FM contexts_constraints
    i_features = set()
    i_contexts = {}
    i_attributes = {}
    i_constraints = []
    i_contexts_constraints = []
    log.info("Processing interface attributes")
    for i in interface["attributes"]:
        id = re.match("attribute\[(.*)\]", i["id"]).group(1)
        i_attributes[id] = {}
        i_attributes[id]["min"] = i["min"]
        i_attributes[id]["max"] = i["max"]
        i_attributes[id]["feature"] = re.match("feature\[(.*)\]", i["featureId"]).group(1)
        # The FM attribute range must be contained in the interface range.
        if (id not in attributes) or \
                (attributes[id]["min"] < i_attributes[id]["min"]) or \
                (attributes[id]["max"] > i_attributes[id]["max"]):
            # FIX: added the missing space before "does not match".
            json.dump({"result": "not_valid: attribute " + id + " does not match"}, out_stream)
            out_stream.write("\n")
            return None
    log.debug(unicode(attributes))
    log.info("Processing contexts")
    for i in interface["contexts"]:
        id = re.match("context\[(.*)\]", i["id"]).group(1)
        i_contexts[id] = {}
        i_contexts[id]["min"] = i["min"]
        i_contexts[id]["max"] = i["max"]
        # FIX: the original compared with "==", which rejected interfaces
        # whose context bounds DID match. "!=" rejects the ones that
        # differ, as the "does not match" message states.
        if (id not in contexts) or \
                (contexts[id]["min"] != i_contexts[id]["min"]) or \
                (contexts[id]["max"] != i_contexts[id]["max"]):
            json.dump({"result": "not_valid: context " + id + " does not match"}, out_stream)
            out_stream.write("\n")
            return None
    log.debug(unicode(contexts))
    log.info("Processing Constraints")
    for i in interface["constraints"]:
        try:
            d = SpecTranslator.translate_constraint(i, interface, features_as_boolean)
            log.debug("Find constraint " + unicode(d))
            i_constraints.append(d["formula"])
            i_features.update(d["features"])
        except Exception as e:
            log.critical("Parsing failed while processing " + i + ": " + str(e))
            log.critical("Exiting")
            sys.exit(1)
    log.info("Processing Context Constraints")
    if "context_constraints" in interface:
        for i in interface["context_constraints"]:
            try:
                d = SpecTranslator.translate_constraint(i, interface, features_as_boolean)
                log.debug("Find context constraint " + unicode(d))
                i_contexts_constraints.append(d["formula"])
            except Exception as e:
                log.critical("Parsing failed while processing " + i + ": " + str(e))
                log.critical("Exiting")
                sys.exit(1)
    log.info("Checking Context Constraints Extensibility")
    # If some context allowed by the interface constraints violates the FM
    # context constraints, the interface is not valid.
    solver = z3.Solver()
    for i in contexts.keys():
        solver.add(contexts[i]["min"] <= z3.Int(i))
        solver.add(z3.Int(i) <= contexts[i]["max"])
    solver.add(z3.And(i_contexts_constraints))
    solver.add(z3.Not(z3.And(contexts_constraints)))
    result = solver.check()
    if result == z3.sat:
        model = solver.model()
        out = {"result": "not_valid: context extensibility problem", "contexts": []}
        for i in contexts.keys():
            out["contexts"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        json.dump(out, out_stream)
        out_stream.write("\n")
        # FIX: stop here as the other failure branches do; previously the
        # check fell through and could emit a second JSON verdict.
        return None
    solver = z3.Solver()
    log.info("Add interface variables")
    # With the boolean encoding the 0/1 bounds on features are unnecessary.
    if not features_as_boolean:
        for i in i_features:
            solver.add(0 <= z3.Int(i), z3.Int(i) <= 1)
    for i in i_attributes.keys():
        solver.add(i_attributes[i]["min"] <= z3.Int(i), z3.Int(i) <= i_attributes[i]["max"])
    for i in i_contexts.keys():
        solver.add(i_contexts[i]["min"] <= z3.Int(i), z3.Int(i) <= i_contexts[i]["max"])
    log.info("Add interface contexts constraints")
    solver.add(z3.And(i_contexts_constraints))
    solver.add(z3.And(contexts_constraints))
    log.info("Add interface constraints")
    for i in i_constraints:
        solver.add(i)
    log.info("Add FM context variables")
    for i in contexts.keys():
        if i not in i_contexts:
            solver.add(contexts[i]["min"] <= z3.Int(i))
            solver.add(z3.Int(i) <= contexts[i]["max"])
    log.info("Building the FM formula")
    formulas = []
    if not features_as_boolean:
        for i in features:
            if i not in i_features:
                formulas.append(0 <= z3.Int(i))
                formulas.append(z3.Int(i) <= 1)
    for i in attributes.keys():
        if i not in i_attributes:
            formulas.append(attributes[i]["min"] <= z3.Int(i))
            formulas.append(z3.Int(i) <= attributes[i]["max"])
    for i in constraints:
        formulas.append(i)
    log.info("Add forall fatures and attributes not formula")
    # sat here means: there is an interface assignment for which NO
    # completion of the remaining FM variables satisfies the FM.
    if features_as_boolean:
        # todo fix print when features are given as booleans
        solver.add(z3.ForAll(
            [z3.Bool(i) for i in features if i not in i_features] +
            [z3.Int(i) for i in attributes.keys() if i not in i_attributes.keys()],
            z3.Not(z3.And(formulas))))
    else:
        solver.add(z3.ForAll(
            [z3.Int(i) for i in features if i not in i_features] +
            [z3.Int(i) for i in attributes.keys() if i not in i_attributes.keys()],
            z3.Not(z3.And(formulas))))
    log.debug(solver)
    log.info("Computing")
    result = solver.check()
    log.info("Printing output")
    if result == z3.sat:
        model = solver.model()
        out = {"result": "not_valid", "contexts": [], "attributes": [], "features": []}
        for i in contexts.keys():
            out["contexts"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        if features_as_boolean:
            for i in i_features:
                out["features"].append({"id": i, "value": unicode(model[z3.Bool(i)])})
        else:
            for i in i_features:
                out["features"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        for i in i_attributes.keys():
            out["attributes"].append({"id": i, "value": unicode(model[z3.Int(i)])})
        json.dump(out, out_stream)
        out_stream.write("\n")
    else:
        out_stream.write('{"result":"valid"}\n')