Example #1
0
    def __init__(self, filename, rand_seed=None, numthreads=1):
        """Set up a MUSer2-backed subset solver.

        Parameters mirror MinisatSubsetSolver; `numthreads` > 1 selects
        parallel operation of the muser2-para binary.
        """
        MinisatSubsetSolver.__init__(
            self, filename, rand_seed, store_dimacs=True)

        # Matches the "v ..." core lines printed by MUSer2.
        self.core_pattern = re.compile(r'^v [\d ]+$', re.MULTILINE)

        self.numthreads = numthreads
        self.parallel = (numthreads > 1)

        # The muser2-para binary is expected to live next to this module.
        here = os.path.dirname(os.path.realpath(__file__))
        self.muser_path = os.path.join(here, 'muser2-para')
        utils.check_executable("MUSer2", self.muser_path)

        self._proc = None  # handle to the running MUSer2 process, if any
        atexit.register(self.cleanup)
Example #2
0
def main():
    """Resolve the configured environment file and (re)create it with conda.

    Side effects: reads the environment pointer (from disk or over HTTP),
    writes temp YAML files, may install git via conda, writes an md5 marker
    file, and finally spawns the platform-specific environment script.
    """

    # Get environment path
    environment_path = utils.get_environment()

    # If no environment is defined, put user in root environment.
    if not environment_path:

        msg = "\n\nCould not find the \"environment.conf\" file in \"{path}\"."
        msg += "\nPlease create an environment pointer file and save it as "
        msg += "\"{path}/environment.conf\"."
        msg += "\nYou can also modify the included example "
        msg += "\"{path}/environment.conf.example\", and rename to "
        msg += "\"{path}/environment.conf\"."
        msg += "\n\nYou are in the root environment of Conda. "
        msg += "The \"conda\" command is available to use now."
        path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
        path = path.replace("\\", "/")

        print msg.format(path=path)

        return

    # If requested to put user into the root environment.
    if environment_path == "root":

        msg = "You are in the root environment of Conda. "
        msg += "The \"conda\" command is available to use now."
        path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
        path = path.replace("\\", "/")

        print msg.format(path=path)

        return

    # Add conda_git_deployment module to environment.
    # Also removing PYTHONPATH that conda root environment needs.
    path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    os.environ["PYTHONPATH"] = path

    # Get environment data.
    environment_string = ""
    if os.path.exists(environment_path):
        f = open(environment_path, "r")
        environment_string = f.read()
        f.close()
    else:
        msg = "Could not find \"{0}\" on disk."
        print msg.format(environment_path)

    # Fall back to fetching the environment file over HTTP(S) when it was
    # not found (or was empty) on disk.
    if not environment_string:
        environment_string = requests.get(environment_path).text

    environment_data = utils.read_yaml(environment_string)

    # Export environment
    if (utils.get_arguments()["export"]
            or utils.get_arguments()["export-without-commit"]):
        repositories_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "..", "repositories",
                         environment_data["name"]))

        # Get commit hash and name from repositories on disk.
        if not utils.check_executable("git"):
            subprocess.call(
                ["conda", "install", "-c", "anaconda", "git", "-y"])
        disk_repos = {}
        for repo in os.listdir(repositories_path):
            path = os.path.join(repositories_path, repo)
            # Only directories that are actual git clones are considered.
            if not os.path.exists(os.path.join(path, ".git")):
                continue

            commit_hash = subprocess.check_output(["git", "rev-parse", "HEAD"],
                                                  cwd=path).rsplit()[0]

            disk_repos[repo] = commit_hash

        # Construct new git dependencies.
        git_data = {"git": []}
        for item in environment_data["dependencies"]:
            if "git" in item:
                for repo in item["git"]:

                    # Get url from enviroment file.
                    # NOTE(review): repo.keys()[0] only works on Python 2
                    # (dict.keys() returns a list there); this module uses
                    # Python 2 print statements throughout, so that matches.
                    url = ""
                    if isinstance(repo, str):
                        url = repo
                    if isinstance(repo, dict):
                        url = repo.keys()[0]

                    # Skip any repositories that aren't cloned yet.
                    name = url.split("/")[-1].replace(".git", "").split("@")[0]
                    if name not in disk_repos.keys():
                        continue

                    # Construct commit url if requested.
                    commit_url = url.split("@")[0]
                    if not utils.get_arguments()["export-without-commit"]:
                        commit_url += "@" + disk_repos[name]

                    if isinstance(repo, str):
                        git_data["git"].append(commit_url)

                    if isinstance(repo, dict):
                        git_data["git"].append({commit_url: repo[url]})

        # Replace git dependencies
        for item in environment_data["dependencies"]:
            if "git" in item:
                environment_data["dependencies"].remove(item)

        environment_data["dependencies"].append(git_data)

        # Write environment file
        utils.write_yaml(environment_data,
                         os.path.join(os.getcwd(), "environment.yml"))

        return

    # Writing original environment to disk
    data_file = os.path.join(tempfile.gettempdir(),
                             'data_%s.yml' % os.getpid())
    utils.write_yaml(environment_data, data_file)

    # Remove git from environment as its not supported by conda (yet).
    for item in environment_data["dependencies"]:
        if "git" in item:
            index = environment_data["dependencies"].index(item)
            del environment_data["dependencies"][index]

    # Create environment file from passed environment.
    environment_filename = os.path.join(tempfile.gettempdir(),
                                        'env_%s.yml' % os.getpid())

    utils.write_yaml(environment_data, environment_filename)

    args = ["conda", "env", "create"]

    # Force environment update/rebuild when requested by command.
    if utils.get_arguments()["update-environment"]:
        args.append("--force")

    # Check whether the environment installed is different from the requested
    # environment, and whether the conda-git-deployment is different.
    # Force environment update/rebuild if different.
    environment_update = False
    if not utils.get_arguments()["suppress-environment-update"]:
        # The hash covers both the environment text and the working
        # directory, so moving the deployment also triggers a rebuild.
        incoming_md5 = hashlib.md5(environment_string +
                                   "cwd: {0}".format(os.getcwd())).hexdigest()
        existing_md5 = ""

        # NOTE(review): this marker path is hard-coded to a Windows
        # Miniconda2 layout -- confirm behaviour on other platforms.
        md5_path = os.path.join(os.path.expanduser("~"), "AppData", "Local",
                                "Continuum", "Miniconda2",
                                environment_data["name"] + ".md5")
        if os.path.exists(md5_path):
            f = open(md5_path, "r")
            existing_md5 = f.read()
            f.close()

        if incoming_md5 != existing_md5:
            environment_update = True
            if "--force" not in args:
                args.append("--force")

        with open(md5_path, "w") as the_file:
            the_file.write(incoming_md5)

    # Create environment
    args.extend(["-f", environment_filename])

    return_code = subprocess.call(args)

    os.remove(environment_filename)

    # Spawning a new process to get the correct python executable and
    # passing data via file on disk.
    platform_script = "environment.sh"
    if platform.system().lower() == "windows":
        platform_script = "environment.bat"

    args = [
        os.path.join(os.path.dirname(__file__), platform_script),
        environment_data["name"],
        os.path.join(os.path.dirname(__file__), "install.py"), data_file
    ]

    args.extend(sys.argv[1:])

    # If its the first installation, we need to pass update to install.py
    if not return_code:
        args.append("--update-environment")

    if platform.system().lower() != "windows":
        args.insert(0, "bash")

    if environment_update and "--update-environment" not in args:
        args.append("--update-environment")

    subprocess.call(args)
 def __start_mongo_server__():
     """Launch a local MongoDB server; blocks until mongod exits."""
     db_path = config.get_mongodb()
     # Fail early with a clear message if mongod is not on PATH.
     utils.check_executable("mongod")
     subprocess.call(["mongod", "--dbpath", db_path])
Example #4
0
            src = os.path.join(tempdir, "conda-git-deployment", ".git")
            dst = os.path.join(path, ".git")
            shutil.copytree(src, dst)

            # Initialising git repository
            subprocess.call(["git", "init"], cwd=path)
            subprocess.call(["git", "add", "."], cwd=path)
        except:
            print "Making conda-git-deployment into git repository failed."
            shutil.rmtree(tempdir)


if __name__ == "__main__":

    # Install git if its not available
    if not utils.check_executable("git"):
        subprocess.call(["conda", "install", "-c", "anaconda", "git", "-y"])

    # Git initialise
    initialise_git()

    # Git update
    if (utils.get_arguments()["update-environment"]
            or utils.get_arguments()["update-repositories"]):
        update()

    # Execute install
    # NOTE(review): "args" is built but never used within this view --
    # presumably a subprocess.call(args) follows; verify against the full file.
    args = [
        "python",
        os.path.join(os.path.dirname(__file__), "environment.py")
    ]
def parse_args():
    """Parse and validate command-line arguments for the MARCO driver.

    Exits with status 1 after printing help when invoked with no arguments,
    when --check-muser fails, or when SMT input is requested on STDIN.
    Exits with status 0 when --check-muser succeeds.

    Returns:
        argparse.Namespace holding the parsed options.
    """
    parser = argparse.ArgumentParser()

    # Standard arguments
    parser.add_argument(
        'infile',
        nargs='?',
        type=argparse.FileType('rb'),
        default=sys.stdin,
        help="name of file to process (STDIN if omitted, in which case use --cnf or --smt)"
    )
    parser.add_argument(
        '-v', '--verbose',
        action='count',
        default=0,
        # Fixed: the original help text ended with an unbalanced ')'.
        help="print more verbose output (constraint indexes for MUSes/MCSes) -- repeat the flag for detail about the algorithm's progress"
    )
    parser.add_argument(
        '-a', '--alltimes',
        action='store_true',
        help="print the time for every output")
    parser.add_argument(
        '-s', '--stats',
        action='store_true',
        help="print timing statistics to stderr")
    parser.add_argument(
        '-T', '--timeout',
        type=int,
        default=None,
        help="limit the runtime to TIMEOUT seconds")
    parser.add_argument(
        '-l', '--limit',
        type=int,
        default=None,
        help="limit number of subsets output (counting both MCSes and MUSes)")

    # Input format flags are mutually exclusive; both default to autodetect.
    type_group = parser.add_mutually_exclusive_group()
    type_group.add_argument(
        '--cnf',
        action='store_true',
        help="assume input is in DIMACS CNF or Group CNF format (autodetected if filename is *.[g]cnf or *.[g]cnf.gz)."
    )
    type_group.add_argument(
        '--smt',
        action='store_true',
        help="assume input is in SMT2 format (autodetected if filename is *.smt2).")
    parser.add_argument(
        '-b', '--bias',
        type=str,
        choices=['MUSes', 'MCSes'],
        default='MUSes',
        help="bias the search toward MUSes or MCSes early in the execution [default: MUSes] -- all will be enumerated eventually; this just uses heuristics to find more of one or the other early in the enumeration."
    )
    parser.add_argument(
        '--print-mcses',
        action='store_true',
        help="for every satisfiable subset found, print the constraints in its complementary MCS instead of the MSS."
    )
    parser.add_argument(
        '--check-muser',
        action='store_true',
        help="just run a check of the MUSer2 helper application and exit (used to configure tests)."
    )

    # Parallelization arguments
    par_group = parser.add_argument_group(
        'Parallelization options',
        "Enable and configure parallel MARCOs execution.")
    par_group.add_argument(
        '--parallel',
        type=str,
        default=None,
        help="run MARCO in parallel, specifying a comma-delimited list of modes selected from: 'MUS', 'MCS', 'MCSonly' -- e.g., \"MUS,MUS,MCS,MCSonly\" will run four separate threads: two MUS biased, one MCS biased, and one with a CAMUS-style MCS enumerator."
    )
    par_group.add_argument(
        '--same-seeds',
        action='store_true',
        # Fixed: the original help text was missing its closing ')'.
        help="use same seeds for all children (still randomized but with all seeds of value 1)."
    )
    par_group.add_argument(
        '--all-randomized',
        action='store_true',
        help="randomly initialize *all* children in parallel mode (default: first thread is *not* randomly initialized, all others are)."
    )
    comms_group = par_group.add_mutually_exclusive_group()
    comms_group.add_argument(
        '--comms-disable',
        action='store_true',
        help="disable the communications between children (i.e., when the master receives a result from a child, it won't send to other children)."
    )
    comms_group.add_argument(
        '--comms-ignore',
        action='store_true',
        help="send results out to children, but do not *use* the results in children (i.e., do not add blocking clauses based on them) -- used only for determining cost of communication."
    )

    # Experimental / Research arguments
    exp_group = parser.add_argument_group(
        'Experimental / research options',
        "These can typically be ignored; the defaults will give the best performance."
    )
    exp_group.add_argument(
        '--mcs-only',
        action='store_true',
        default=False,
        help="enumerate MCSes only using a CAMUS-style MCS enumerator.")
    exp_group.add_argument(
        '--rnd-init',
        type=int,
        nargs='?',
        const=1,      # value used when --rnd-init is given without a seed
        default=None,  # value used when --rnd-init is not given at all
        help="only used if *not* using --parallel: initialize variable activity in solvers to random values (optionally specify a random seed [default: 1 if --rnd-init specified without a seed])."
    )
    exp_group.add_argument(
        '--improved-implies',
        action='store_true',
        help="use improved technique for Map formula implications (implications under assumptions) [default: False, use only singleton MCSes as hard constraints]"
    )
    exp_group.add_argument(
        '--dump-map',
        nargs='?',
        type=argparse.FileType('w'),
        help="dump clauses added to the Map formula to the given file.")
    solver_group = exp_group.add_mutually_exclusive_group()
    solver_group.add_argument(
        '--force-minisat',
        action='store_true',
        help="use Minisat in place of MUSer2 for CNF (NOTE: much slower and usually not worth doing!)"
    )
    solver_group.add_argument(
        '--pmuser',
        type=int,
        default=None,
        help="use MUSer2-para in place of MUSer2 to run in parallel (specify # of threads.)"
    )
    exp_group.add_argument(
        '--nomax',
        action='store_true',
        help="perform no model maximization whatsoever (applies either shrink() or grow() to all seeds)"
    )

    args = parser.parse_args()

    # With no arguments at all, show usage instead of silently reading STDIN.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    # Self-test mode: verify the bundled MUSer2 binary is runnable and exit.
    if args.check_muser:
        try:
            muser_path = os.path.join(
                os.path.dirname(os.path.realpath(__file__)), 'muser2-para')
            utils.check_executable("MUSer2", muser_path)
        except utils.ExecutableException as e:
            print(str(e))
            sys.exit(1)
        sys.exit(0)

    # SMT parsing requires a seekable named file, so STDIN is rejected.
    if args.smt and args.infile == sys.stdin:
        sys.stderr.write(
            "SMT cannot be read from STDIN.  Please specify a filename.\n")
        sys.exit(1)

    return args