Example #1
import os
import time

import helpers   # project-local module providing eprint()
import makeCNF   # project-local module; see Example #5
# getNextPuzzle is assumed to be defined elsewhere in this project.


def main(input_file, output_file):
    print("Starting script")
    try:
        Puzzles = open(input_file, 'r')
    except IOError:
        # open() raises rather than returning None, so catch the failure here
        helpers.eprint("Could not open puzzles file.")
        exit(1)
    curr_puzzle = getNextPuzzle(Puzzles)
    data = []
    Solved_Puzzles = open(output_file, 'w')
    while curr_puzzle is not None:
        puzzle_data = curr_puzzle[1]
        cnf_file_name = curr_puzzle[0].replace(" ", "") + "_CNF.txt"
        sat_file_name = curr_puzzle[0].replace(" ", "") + "_SAT.txt"
        cnf = makeCNF.main(puzzle_data)
        with open(cnf_file_name, "w") as f:
            f.write(cnf)
        temp_file_name = "SAT_statistics.txt"
        # command = "minisat "+ cnf_file_name + " " + sat_file_name + " > " + temp_file_name
        command = "cryptominisat5 " + cnf_file_name + " > " + temp_file_name
        start_time = time.time()
        os.system(command)
        finish_time = time.time()
        #with open(temp_file_name, "r") as f:
        #    minisat_output = f.read()
        # data_entry = parseMiniSatOutput(minisat_output)
        data_entry = {"time": finish_time - start_time}
        data_entry["Name"] = curr_puzzle[0].strip()
        data.append(data_entry)
        #with open(sat_file_name, "r") as f:
        #    sat = f.read()
        #solved_puzzle = solveSudoku.main(sat)
        # Solved_Puzzles.write(curr_puzzle[0] + "\n")
        # Solved_Puzzles.write(solved_puzzle)
        # Solved_Puzzles.write("Time to solve: " + str(data_entry["cpu_time"]) + " s\n")
        # Solved_Puzzles.write("Memory used to solve: " + str(data_entry["memory_used"]) + " MB\n\n")
        os.system("rm " + cnf_file_name + " " + temp_file_name)
        curr_puzzle = getNextPuzzle(Puzzles)
        if curr_puzzle is None:
            print("Puzzles processed:")
            print(len(data))

    Puzzles.close()
    Solved_Puzzles.close()

    num_entries = len(data)
    MB_used = 0
    CPU_used = 0
    for entry in data:
        #MB_used += entry["memory_used"]
        CPU_used += entry["time"]

    #MB_used = MB_used/num_entries
    if num_entries > 0:
        CPU_used = CPU_used / num_entries
    print("Average CPU usage: " + str(CPU_used) + " s")
Example #2
import re
import traceback

import helpers   # project-local module providing eprint()


def parseMiniSatOutput(output):
    # print(output)
    mem_used_re = re.compile(r"Memory used\s*:\s*(\d+\.\d+)\s*MB")
    cpu_time_re = re.compile(r"CPU time\s*:\s*(\d+\.\d+)\s*s")
    mem_used_match = mem_used_re.search(output)
    cpu_time_match = cpu_time_re.search(output)
    if not mem_used_match or not cpu_time_match:
        traceback.print_stack()
        helpers.eprint("Could not get Memory time or CPU time")
        exit(1)
    mem_used = float(mem_used_match.group(1))
    cpu_time = float(cpu_time_match.group(1))
    return {"memory_used": mem_used, "cpu_time": cpu_time}
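Hypothetical usage, with a string shaped like minisat's statistics block:

sample_output = ("Memory used           : 11.00 MB\n"
                 "CPU time              : 0.01 s\n")
stats = parseMiniSatOutput(sample_output)
# stats == {"memory_used": 11.0, "cpu_time": 0.01}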
def convert_puzzle(puzzle):
    puzzle = puzzle.strip()
    if len(puzzle) != 81:
        helpers.eprint("Improper puzzle length: " + str(len(puzzle)))
        exit(1)

    # Break the flat 81-character string into nine rows of nine
    return_val = ""
    for i in range(0, 81, 9):
        return_val += puzzle[i:i + 9] + "\n"
    return return_val
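For example, reshaping a dummy 81-character puzzle string:

grid = convert_puzzle("1" * 81)
assert grid == "111111111\n" * 9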
Example #4
import argparse
import logging

from skimage import io
from skimage.util import img_as_ubyte
# ExtendedValidator, structure_extraction, and the helpers module aliased
# as "he" are assumed to be project-local imports.

# if __name__ == "__main__":
# parse input
parser = argparse.ArgumentParser()
parser.add_argument('config_file', help='JSON configuration file')
parser.add_argument('schema_file', help='JSON schema file')
args = parser.parse_args()
# Logging
logging.debug("config file: %s", args.config_file)
logging.debug("schema file: %s", args.schema_file)

json_validation = ExtendedValidator(args.config_file, args.schema_file)

success, config = json_validation.extended_validator()
logging.debug("configuration: %s", config)
if not success:
    he.eprint(config)
    exit(-1)

# FFT

grid_map = img_as_ubyte(io.imread(config["input_map"]))
rose = structure_extraction(grid_map, peak_height=config["peak_extraction_parameters"]["peak_height"],
                            smooth=config["peak_extraction_parameters"]["smooth_histogram"],
                            sigma=config["peak_extraction_parameters"]["sigma"])
rose.process_map()

filter_level = config["filtering_parameters"]["filter_level"]

rose.simple_filter_map(filter_level)

rose.generate_initial_hypothesis(type='simple', min_wall=5)
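The snippet shows only which configuration keys it reads; a config file consistent with those accesses might look like the following (every value is an illustrative guess):

{
    "input_map": "map.png",
    "peak_extraction_parameters": {
        "peak_height": 0.5,
        "smooth_histogram": true,
        "sigma": 3
    },
    "filtering_parameters": {
        "filter_level": 0.2
    }
}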
Example #5
import sys

import helpers   # project-local module providing eprint()


def main(in_string):

    # Encoding sudoku puzzle into string
    lines = in_string.splitlines()
    if len(lines) != 9:
        helpers.eprint("Invalid Sudoku puzzle in input file")
        sys.exit()
    encoded_puzzle = ""
    for i in range(9):
        encoded_puzzle += lines[i]
        if len(lines[i]) != 9:
            helpers.eprint("Invalid Sudoku puzzle in input file")
            sys.exit()


    # Creating matrix from encoded puzzle
    matrix = [[0 for x in range(9)] for y in range(9)]
    for x in range(9):
        for y in range(9):
            # Wildcard characters ('.', '*', '?') denote empty cells
            temp = encoded_puzzle[x*9+y]
            if temp in ['.', '*', '?']:
                temp = 0
            try:
                matrix[x][y] = int(temp)
            except ValueError:
                helpers.eprint("Improper character used in provided puzzle")
                sys.exit()

    # Write the DIMACS header: 729 variables (9x9 cells x 9 values) and
    # 8829 fixed constraint clauses plus one unit clause per pre-filled cell
    variables = 0
    cnf = ""
    for x in range(9):
        for y in range(9):
            if matrix[x][y] != 0:
                variables += 1
    cnf += "p cnf 729 " + str(8829 + variables) + "\n"


    # Write clauses for existing sudoku values
    for x in range(9):
        for y in range(9):
            if matrix[x][y] != 0:
                cnf += str(to_nineary(x+1, y+1, matrix[x][y])) + " 0\n"

    # Write clauses for constraint: There is at least one number in each entry
    # ie. There is at least a number [1-9] in each cell of the sudoku
    for x in range(1, 10):
        for y in range(1, 10):
            for z in range(1, 10):
                cnf += str(to_nineary(x, y, z)) + " "
            cnf += " 0\n"

    # Write clauses for constraint: Each number appears at most once in each row
    for x in range(1, 10):
        for z in range(1, 10):
            for y in range(1, 9):
                for i in range(y + 1, 10):
                    cnf += "-" + str(to_nineary(x, y, z)) + " -" + str(to_nineary(x, i, z)) + " 0\n"

    # Write clauses for constraint: Each number appears at most once in each column
    for y in range(1, 10):
        for z in range(1, 10):
            for x in range(1,9):
                for i in range(x + 1, 10):
                    cnf += "-" + str(to_nineary(x, y ,z)) + " -" + str(to_nineary(i, y, z)) + " 0\n"

    # Write clauses for constraint: Each number appears at most once in each 3x3 sub-grid
    for z in range(1,10):
        for i in range(0,3):
            for j in range(0,3):
                for x in range(1,4):
                    for y in range(1,4):

                        for k in range(y + 1, 4):
                            cnf += "-" + str(to_nineary((3*i) + x, (3*j) + y, z)) + " -" + str(to_nineary((3*i) + x, (3*j) + k, z)) + " 0\n"

                        for k in range(x + 1, 4):
                            for l in range(1, 4):
                                cnf += "-" + str(to_nineary((3*i) + x, (3*j) + y, z)) + " -" + str(to_nineary((3*i) + k, (3*j) + l, z)) + " 0\n"


    # Done writing clauses; the caller runs the SAT solver on the returned CNF
    return cnf
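to_nineary is not shown in this listing. A minimal sketch consistent with the "p cnf 729" header above, assuming the usual mapping of a (row, column, value) triple, each in 1..9, onto a DIMACS variable in 1..729 (the exact formula is a guess; any bijection the decoder inverts consistently would do):

def to_nineary(x, y, z):
    # Rows, columns, and values are 1-based; variables come out in 1..729
    return 81 * (x - 1) + 9 * (y - 1) + z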
Example #6
                    cnf += "-" + str(to_nineary(x, y ,z)) + " -" + str(to_nineary(i, y, z)) + " 0\n"

    # Write clauses for constraint: Each number appears at most once in each 3x3 sub-grid
    for z in range(1,10):
        for i in range(0,3):
            for j in range(0,3):
                for x in range(1,4):
                    for y in range(1,4):

                        for k in range(y + 1, 4):
                            cnf += "-" + str(to_nineary((3*i) + x, (3*j) + y, z)) + " -" + str(to_nineary((3*i) + x, (3*j) + k, z)) + " 0\n"

                        for k in range(x + 1, 4):
                            for l in range(1, 4):
                                cnf += "-" + str(to_nineary((3*i) + x, (3*j) + y, z)) + " -" + str(to_nineary((3*i) + k, (3*j) + l, z)) + " 0\n"


    # Done writing clauses. Run minisat solver on completed CNF
    return cnf

if __name__ == "__main__":
    input_file = sys.argv[1]
    output_file = sys.argv[2]
    with open(input_file, "r") as in_f:
        input_data = in_f.read()
    if len(sys.argv) != 3:
        helpers.eprint("Invalid format. Please use format: \"python sud2sat.py input.txt output.txt\"")
        sys.exit()
    with open(output_file, "w") as out_f:
        out_f.write(main(input_data))
        out_f.close()
Example #7
import os
import random

from flask import request
from slacker import Slacker

# eprint is assumed to be a project-local stderr helper, and roll() is
# presumed to be registered as a Flask route elsewhere in the app.
def roll():
    cmd = request.form['text']
    cmd = cmd.replace(" ", "")
    roll = ""
    operator = "+"
    modifier = 0

    if "+" in cmd:
        roll, modifier = cmd.split("+")
    elif "-" in cmd:
        operator = "-"
        roll, modifier = cmd.split("-")
    else:
        roll = cmd

    number, sides = roll.split("d")
    modifier = int(modifier)
    number = int(number) if number else 1   # allow shorthand like "d20"
    sides = int(sides)
    roll_result = 0
    for x in range(0, number):
        roll_result += random.randint(1, sides)
    roll_plus_mods = "{} {} {}".format(str(roll_result), operator,
                                       str(modifier))
    result = roll_result + modifier if operator == "+" else roll_result - modifier

    final_result = "*{} rolls a {}* _({} = {})_".format(
        request.form['user_name'], result, cmd, roll_plus_mods)

    # NOTE: the original snippet hard-coded a bot token here; read it from
    # the environment instead (the variable name is illustrative)
    bot_token = os.environ["SLACK_BOT_TOKEN"]
    s = Slacker(bot_token)
    bot_entry = [
        x for x in s.users.list().body['members'] if x['id'] == 'U236APVA4'
    ][0]
    bot_name = bot_entry['name']

    channel = request.form['channel_id']

    s.chat.post_message(
        channel,
        final_result,
        username=bot_name,
        icon_url="https://avatars.slack-edge.com/2016-08-19/71252185058_c239c22e9866f8a9d48f_48.png"
    )

    eprint("sent to slack")
    eprint(request.form)

    #  send_to_slack = {
    #      "text": final_result,
    #  }

    #  slack_headers = {"content-type": "application/json"}

    # req = requests.post(
    #     "https://hooks.slack.com/services/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",
    #     data = json.dumps(send_to_slack),
    #     headers = slack_headers,
    #  )

    return ('', 204)
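The parsing and arithmetic above are easier to unit-test when pulled out of the Flask handler. A framework-free sketch of the same "NdS±M" logic (the function name is hypothetical):

import random

def roll_dice(cmd):
    cmd = cmd.replace(" ", "")
    operator, modifier = "+", "0"
    if "+" in cmd:
        body, modifier = cmd.split("+")
    elif "-" in cmd:
        operator = "-"
        body, modifier = cmd.split("-")
    else:
        body = cmd
    number, sides = body.split("d")
    # Sum <number> rolls of a <sides>-sided die, then apply the modifier
    total = sum(random.randint(1, int(sides)) for _ in range(int(number or 1)))
    return total + int(modifier) if operator == "+" else total - int(modifier)

# e.g. roll_dice("2d6+3") returns an int between 5 and 15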
Example #8
            print "Puzzles processed:"
            print len(data)

    Puzzles.close()
    Solved_Puzzles.close()

    num_entries = len(data)
    MB_used = 0
    CPU_used = 0
    quickest_time = 1
    for entry in data:
        #MB_used += entry["memory_used"]
        CPU_used += entry["time"]

    #MB_used = MB_used/num_entries
    CPU_used = CPU_used / num_entries
    print "Average CPU usage: " + str(CPU_used) + " s"
    #print "Average memory usage: " + str(MB_used) + " MB"

    # os.system("minisat "+ cnf_file_name + " " + sat_file_name)


if __name__ == "__main__":
    if len(sys.argv) != 3:
        helpers.eprint(
            "Command format: python master.py <input_file_name> <output_file_name>"
        )
        exit(1)
    input_file = sys.argv[1]
    output_file = sys.argv[2]
    main(input_file, output_file)
Example #9
import os
import random
import time
from collections import deque
from urllib.parse import urljoin

import networkx as nx
import numpy as np
import requests
from bs4 import BeautifulSoup

# eprint, is_cached, get_cached, cache_page, extract_visible, Node,
# url_to_filename, and default_pipeline are assumed project-local helpers.


def BFS_crawl(initial_urls,
              depth_limit,
              breadth_limit,
              save=True,
              pipeline=default_pipeline):
    '''
    Crawl the web breadth-first, starting from the roots in initial_urls.
    Each visited webpage is run through the classifier pipeline to produce
    a True / False prediction.
    At most <breadth_limit> links are followed from each page, down to a
    depth of at most <depth_limit>, so the running time grows exponentially
    in those parameters.

    Return the graph of visited webpages.'''
    # Queue of (url, depth, parent_node) triples
    queue = deque()
    for initial_url in initial_urls:
        queue.append((initial_url, 0, None))

    G = nx.DiGraph()
    seen_urls = set()

    print("BFS started with params: ")
    print("Depth_limit: {}\nBreadth_limit: {}.".format(depth_limit,
                                                       breadth_limit))

    # Rough upper bound, assuming ~1 s per page (download + sleep)
    estimated_time = len(initial_urls) * np.power(breadth_limit,
                                                  depth_limit + 1)
    print("Estimated time: {}s".format(estimated_time))
    start_time = time.time()

    while queue:
        url, depth, parent_node = queue.popleft()

        print(".", end="")

        # Add url to seen_urls here, to avoid infinite loop (if the page links to itself)
        seen_urls.add(url)

        # Fetch from cache or download the page
        if is_cached(url):
            text = get_cached(url)
            status_code = 200
        else:
            try:
                # Sleep to avoid getting banned
                time.sleep(0.5)

                r = requests.get(url)
                status_code = r.status_code
                text = r.text

                # TODO try if cache page works
                cache_page(url, text)
            except Exception as e:
                eprint("Exception while requesting {}".format(url))
                eprint(e)
                status_code = -1

        if status_code != 200:
            node = Node(url, Node.status["fail"], 0)
            G.add_node(node)
        else:
            try:
                soup = BeautifulSoup(text, "lxml")
                visible_text = extract_visible(soup)
            except Exception as e:
                eprint("Exception while parsing {}".format(url))
                eprint(e)
                continue

            # Predict label & get strength of prediction using the supplied pipeline
            label = pipeline.predict([visible_text])[0]
            decision_func = pipeline.decision_function([visible_text])[0]

            node = Node(url, Node.status[label], decision_func)
            G.add_node(node)

            if depth < depth_limit:
                # Get outgoing url in their absolute form
                out_urls = []
                for a in soup.find_all("a"):
                    try:
                        out_urls.append(urljoin(url, a.get('href', '')))
                    except Exception:
                        # Discard the URL if it could not be made absolute
                        print("Exception while joining {} with {}".format(
                            url, a.get('href', '')))

                # Remove already seen urls
                out_urls = [
                    out_url for out_url in out_urls if out_url not in seen_urls
                ]

                # Keeping only <breadth_limit> out_urls
                if len(out_urls) > breadth_limit:
                    out_urls = random.sample(out_urls, breadth_limit)

                queue.extend(
                    (out_url, depth + 1, node) for out_url in out_urls)

        if parent_node is not None:
            G.add_edge(parent_node, node)

    print("Crawling finished after {}s".format(time.time() - start_time))

    # Pickle file if asked
    if save:
        save_path = os.path.join(
            'saved', 'graphs', "{}-depth:{}-breadth:{}.pkl".format(
                url_to_filename(initial_urls[0]), depth_limit, breadth_limit))

        nx.write_gpickle(G, save_path)
        print("Graph saved under {}".format(save_path))

    return G
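The Node class is not part of this listing. A minimal sketch consistent with how it is used above (the attribute names and status codes are guesses; it mainly needs to be hashable so networkx can use it as a graph node):

class Node:
    # "fail" marks pages that could not be fetched; the classifier's
    # labels (assumed True/False here) index the remaining entries
    status = {"fail": -1, False: 0, True: 1}

    def __init__(self, url, status, score):
        self.url = url
        self.node_status = status
        self.score = score

    def __hash__(self):
        return hash(self.url)

    def __eq__(self, other):
        return isinstance(other, Node) and self.url == other.url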