def append_to_files(start_nodes, goal_nodes, occ_grid, all_path_nodes):
    print("appending to files__")
    assert (len(occ_grid) == len(start_nodes))
    assert (len(all_path_nodes) == len(start_nodes))

    s_file = open("dataset_new/start_nodes.txt", 'a')
    g_file = open("dataset_new/goal_nodes.txt", 'a')
    occ_file = open("dataset_new/occ_grid.txt", 'a')

    np.savetxt(s_file, np.array(start_nodes), delimiter=" ", fmt="%s")
    np.savetxt(g_file, np.array(goal_nodes), delimiter=" ", fmt="%s")
    np.savetxt(occ_file, np.array(occ_grid), delimiter=" ", fmt="%s")
    helper.write_to_file("dataset_new", all_path_nodes)
Example #2
def append_to_files(start_nodes, goal_nodes, occ_grid, all_path_nodes):
    assert (len(occ_grid) == len(start_nodes))
    assert (len(all_path_nodes) == len(start_nodes))

    try:
        s_file = open("dataset_23June/start_nodes.txt", 'ab')
        g_file = open("dataset_23June/goal_nodes.txt", 'ab')
        occ_file = open("dataset_23June/occ_grid.txt", 'ab')
    except OSError:
        print("File doesn't exist")
        s_file = open("dataset_23June/start_nodes.txt", 'w')
        g_file = open("dataset_23June/goal_nodes.txt", 'w')
        occ_file = open("dataset_23June/occ_grid.txt", 'w')

    np.savetxt(s_file, np.array(start_nodes), delimiter=" ", fmt="%s")
    np.savetxt(g_file, np.array(goal_nodes), delimiter=" ", fmt="%s")
    np.savetxt(occ_file, np.array(occ_grid), delimiter=" ", fmt="%s")
    helper.write_to_file("dataset_23June", all_path_nodes)
Example #3
def search(puzzle):
    print('Solving puzzle #', puzzle['id'])
    visited = dict()
    solution_path = deque()
    search_path = deque()
    visited[str(puzzle['board'])] = 1

    depth_first_search(puzzle, 1, visited, puzzle['board'], solution_path,
                       search_path)

    if len(solution_path) > 0:
        solution_path.insert(0, "0   " + str(puzzle['board']))
    search_path.insert(0, "0   " + str(puzzle['board']))

    helper.write_to_file(
        str(puzzle['id']) + "_dfs", search_path, solution_path)
    print('>> Solved')
    print('Path length: ', len(search_path), '\n')
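
The recursive depth_first_search driven by this snippet is not included. Here is a rough sketch of how such a helper could look, with is_goal() and successors() as hypothetical stand-ins for the puzzle's goal test and move generator; the real implementation in the source repository likely differs.

def depth_first_search(puzzle, depth, visited, board, solution_path, search_path):
    # hypothetical sketch -- is_goal() and successors() stand in for the
    # puzzle's real goal test and move generator
    if is_goal(board):
        return True
    for next_board in successors(board):
        key = str(next_board)
        if key in visited:
            continue
        visited[key] = 1
        search_path.append(str(depth) + "   " + key)
        if depth_first_search(puzzle, depth + 1, visited, next_board,
                              solution_path, search_path):
            solution_path.appendleft(str(depth) + "   " + key)
            return True
    return False
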
Example #4
    else:
        map_directory = 'maps'

    file_path_map1 = os.path.join(working_directory,
                                  map_directory + '/map1.txt')
    file_path_map2 = os.path.join(working_directory,
                                  map_directory + '/map2.txt')
    file_path_map3 = os.path.join(working_directory,
                                  map_directory + '/map3.txt')

    maze_map_map1 = []
    with open(file_path_map1) as f1:
        maze_map_map1 = f1.readlines()

    maze_map_map2 = []
    with open(file_path_map2) as f2:
        maze_map_map2 = f2.readlines()

    maze_map_map3 = []
    with open(file_path_map3) as f3:
        maze_map_map3 = f3.readlines()

    depth_first_search(maze_map_map1)  # Parameter: Input file Map as 2D array
    write_to_file("results/dfs_map1")  # Parameter: Output file name

    depth_first_search(maze_map_map2)
    write_to_file("results/dfs_map2")

    depth_first_search(maze_map_map3)
    write_to_file("results/dfs_map3")
    maze_map_map2 = []
    with open(file_path_map2) as f2:
        maze_map_map2 = f2.readlines()

    maze_map_map3 = []
    with open(file_path_map3) as f3:
        maze_map_map3 = f3.readlines()

    start_pos_map1 = get_cell_pos(maze_map_map1, 's')
    start_pos_map2 = get_cell_pos(maze_map_map2, 's')
    start_pos_map3 = get_cell_pos(maze_map_map3, 's')
    goals_pos_map1 = get_cell_pos(maze_map_map1, '*')
    goals_pos_map2 = get_cell_pos(maze_map_map2, '*')
    goals_pos_map3 = get_cell_pos(maze_map_map3, '*')

    for goal_pos_map1 in goals_pos_map1:
        path_map1 = iterative_deepening_depth_first_search(
            maze_map_map1, start_pos_map1, goal_pos_map1)
        write_to_file("iddfs_map1", path_map1)

    for goal_pos_map2 in goals_pos_map2:
        path_map2 = iterative_deepening_depth_first_search(
            maze_map_map2, start_pos_map2, goal_pos_map2)
        write_to_file("iddfs_map2", path_map2)

    for goal_pos_map3 in goals_pos_map3:
        path_map3 = iterative_deepening_depth_first_search(
            maze_map_map3, start_pos_map3, goal_pos_map3)
        write_to_file("iddfs_map3", path_map3)
Example #6
def generate_google_manufacturer_xml(template_env, input_file):
    available_extensions = ['.csv', '.xls']
    items = []
    context = {}

    if not check_extension(input_file, available_extensions):
        logging_info(
            'The file extension should be %s.' %
            (','.join(available_extensions)), 'ERROR')
        return

    try:
        name, file_extension = os.path.splitext(input_file)
        # ci maps each expected column name to its index; -1 means the column
        # is missing from the input header, e.g.
        # {
        #     'MPN': -1,            # the column index of the MPN field
        #     'Brand Name': -1,
        #     'Item Name': -1,
        #     'GTIN': -1,
        #     'Description': -1,
        #     'Long Description': -1
        # }
        ci = {MPN: -1, BRAND: -1, TITLE: -1, GTIN: -1,
              DESC: -1, L_DESC: -1, UPC: -1}
        if file_extension == '.csv':
            # 'rU' mode was removed in Python 3.11; the csv docs recommend newline=''
            with open(input_file, 'r', newline='') as csvfile:
                reader = csv.reader(csvfile)
                for idx, item in enumerate(reader):
                    if idx == 0:
                        for i, c in enumerate(item):
                            ci[c] = i
                    else:
                        data = {
                            'id':
                            item[ci[MPN]] if ci[MPN] > -1 else '',
                            'brand':
                            item[ci[BRAND]] if ci[BRAND] > -1 else '',
                            'title':
                            item[ci[TITLE]] if ci[TITLE] > -1 else '',
                            'gtin':
                            item[ci[GTIN]] if ci[GTIN] > -1 else '',
                            'mpn':
                            item[ci[MPN]] if ci[MPN] > -1 else '',
                            'description':
                            item[ci[DESC]] if ci[DESC] > -1 else '',
                            'bullet_points':
                            item[ci[L_DESC]] if ci[L_DESC] > -1 else '',
                        }
                        data['bullet_points'] = generate_bullets(
                            data['bullet_points'])

                        if data['gtin'] == '':
                            if ci[UPC] > -1 and item[ci[UPC]] != '':
                                data['gtin'] = convert_upc_to_gtin(
                                    item[ci[UPC]])
                        items.append(data)
        else:  # .xls file
            logging_info('START CONVERSION')
            # xlrd cannot read .xls files generated by PHPExcel,
            # so the file is converted first
            input_file_c = convert_xls_file(input_file)
            logging_info('END CONVERSION')

            if input_file_c == '':
                raise Exception('Could not convert xls file')

            wb = xlrd.open_workbook(filename=input_file_c)
            s_names = wb.sheet_names()
            for sn in s_names:
                item_sheet = wb.sheet_by_name(sn)
                for idx, row in enumerate(item_sheet.get_rows()):
                    if idx == 0:
                        for i, c in enumerate(row):
                            ci[c.value] = i
                    else:
                        data = {
                            'id':
                            parse_xls_value(row[ci[MPN]].value)
                            if ci[MPN] > -1 else '',
                            'brand':
                            row[ci[BRAND]].value if ci[BRAND] > -1 else '',
                            'title':
                            row[ci[TITLE]].value if ci[TITLE] > -1 else '',
                            'gtin':
                            parse_xls_value(row[ci[GTIN]].value)
                            if ci[GTIN] > -1 else '',
                            'mpn':
                            parse_xls_value(row[ci[MPN]].value)
                            if ci[MPN] > -1 else '',
                            'description':
                            row[ci[DESC]].value if ci[DESC] > -1 else '',
                            'bullet_points':
                            row[ci[L_DESC]].value if ci[L_DESC] > -1 else '',
                        }
                        data['bullet_points'] = generate_bullets(
                            data['bullet_points'])
                        if data['gtin'] == '':
                            if ci[UPC] > -1 and row[ci[UPC]].value != '':
                                data['gtin'] = convert_upc_to_gtin(
                                    row[ci[UPC]].value)
                        items.append(data)
    except Exception as e:
        logging_info(str(e), 'ERROR')
        return

    context['items'] = items

    template = template_env.get_template('GoogleManufacturer.html')
    output_content = template.render(context).encode('utf-8')

    filename = write_to_file(output_content)

    logging_info(filename, 'RESULT_FILE')
    logging_info('google-manufacturer.xml', 'FILE_NAME')
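
convert_upc_to_gtin is not shown in this example. A minimal sketch, assuming the target format is GTIN-14: the 12-digit UPC-A code is left-padded with zeros, which keeps the existing check digit valid under the GS1 algorithm.

def convert_upc_to_gtin(upc):
    # hypothetical sketch: zero-pad a UPC-A code to a 14-digit GTIN
    digits = str(upc).strip()
    return digits.zfill(14) if digits.isdigit() else ''
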
Example #7
def main():
    random.seed(1000)
    dense_G = nx.read_graphml("graphs_2d/dense_graph.graphml")
    shallow_G = nx.read_graphml("graphs_2d/shallow_graph.graphml")
    start_nodes = []
    goal_nodes = []

    occ_grid = []
    all_path_nodes = []
    no_env = 15
    no_pp = 10
    no_paths = 5
    knn = 10

    for n in range(no_env):
        print("----------------------------------------------env_no = ", n)
        shallow_G1 = shallow_G.copy()
        obstacles = get_obstacles_posns()
        print("obstacles = ", obstacles)
        occ_grid1 = get_occ_grid(obstacles)
        dense_G1 = dense_G.copy()
        dense_G1 = helper.remove_invalid_edges(dense_G1, obstacles)
        p = 0
        while (p < no_pp):
            dense_G2 = dense_G1.copy()
            print("pp_no = ", p)

            flag1 = False

            while (flag1 == False):
                start_n = random.choice(list(dense_G2.nodes()))
                goal_n = random.choice(list(dense_G2.nodes()))

                start = state_to_numpy(dense_G2.node[start_n]['state'])
                goal = state_to_numpy(dense_G2.node[goal_n]['state'])
                if (not is_trivial(start, goal, obstacles)):
                    if (path_exists(start_n, goal_n, dense_G2)):
                        flag1 = True
                        start_nodes.append(start_n)
                        goal_nodes.append(goal_n)
                        occ_grid.append(occ_grid1)

            flag = 0
            curr_path_nodes = []
            while (flag < 5):
                # print("flag = ", flag)
                if (not is_trivial(start, goal, obstacles)):
                    if (path_exists(start_n, goal_n, dense_G2)):
                        # print("inside if")
                        shallow_G1.add_node(
                            start_n, state=dense_G2.node[start_n]['state'])
                        shallow_G1.add_node(
                            goal_n, state=dense_G2.node[goal_n]['state'])

                        shallow_G1 = connect_knn(shallow_G1, dense_G2, start_n,
                                                 goal_n, knn)
                        if (not path_exists(shallow_G1, 'o' + start_n,
                                            'o' + goal_n)):
                            # print("occ_grid = ", occ_grid1.reshape(20,20))
                            # plot_occ_grid(occ_grid1.reshape(20,20))
                            # return
                            flag += 1
                            path_nodes = get_path_nodes(
                                shallow_G1, dense_G2, start_n, goal_n,
                                obstacles)
                            if (flag == 1 and path_nodes[0] == '-1'):
                                p -= 1
                                flag = 0
                                # print("p is now ", p)
                                break
                            else:
                                curr_path_nodes.append(path_nodes)
                                if (path_nodes[0] == start_n):
                                    path_nodes = path_nodes[1:]

                                print("path_nodes = ", path_nodes)
                            for node in path_nodes:
                                try:
                                    dense_G2.remove_node(node)
                                except nx.NetworkXError:
                                    # node was already removed
                                    pass
                            continue
                    else:
                        break
                else:
                    break
                    # print("trivial nodes")
            if (not flag == 0):
                all_path_nodes.append(
                    list(chain.from_iterable(curr_path_nodes)))
            else:
                start_nodes = start_nodes[:-1]
                goal_nodes = goal_nodes[:-1]
                occ_grid = occ_grid[:-1]
            p += 1

    occ_grid = np.array(occ_grid)
    print("occ_grid.shape = ", occ_grid.shape)
    assert (len(occ_grid) == len(start_nodes)), "No of pp mismatch"
    assert len(all_path_nodes) == len(start_nodes), (
        "len(all_path_nodes) = " + str(len(all_path_nodes)) +
        " len(start_nodes) = " + str(len(start_nodes)))
    np.savetxt("dataset_new/start_nodes.txt",
               np.array(start_nodes),
               delimiter=" ",
               fmt="%s")
    np.savetxt("dataset_new/goal_nodes.txt",
               np.array(goal_nodes),
               delimiter=" ",
               fmt="%s")
    np.savetxt("dataset_new/occ_grid.txt",
               np.array(occ_grid),
               delimiter=" ",
               fmt="%s")
    helper.write_to_file("dataset_new", all_path_nodes)
    else:
        map_directory = 'maps'

    file_path_map1 = os.path.join(working_directory, map_directory + '/map1.txt')
    file_path_map2 = os.path.join(working_directory, map_directory + '/map2.txt')
    file_path_map3 = os.path.join(working_directory, map_directory + '/map3.txt')

    maze_map_map1 = []
    with open(file_path_map1) as f1:
        maze_map_map1 = f1.readlines()

    maze_map_map2 = []
    with open(file_path_map2) as f2:
        maze_map_map2 = f2.readlines()

    maze_map_map3 = []
    with open(file_path_map3) as f3:
        maze_map_map3 = f3.readlines()

    
    iterative_deepening_depth_first_search(maze_map_map1)
    write_to_file("results/iddfs_map1")

    iterative_deepening_depth_first_search(maze_map_map2)
    write_to_file("results/iddfs_map2")
    
    iterative_deepening_depth_first_search(maze_map_map3)
    write_to_file("results/iddfs_map3")


Example #9
def main():
    # reading the command line arguments
    parser = argparse.ArgumentParser(
        description='Read in file paths and other parameters.')
    parser.add_argument('--asm_path',
                        help='path to the asm training files.',
                        default="gs://uga-dsp/project1/data/asm/",
                        type=str)
    parser.add_argument('--bytes_path',
                        help='path to the bytes training files.',
                        default="gs://uga-dsp/project1/data/bytes/",
                        type=str)
    parser.add_argument('--train_files',
                        help='path to the file containing the train files.',
                        default="gs://uga-dsp/project1/files/X_train.txt",
                        type=str)
    parser.add_argument('--test_files',
                        help='path to the file containing the test files.',
                        default="gs://uga-dsp/project1/files/X_test.txt",
                        type=str)
    parser.add_argument('--train_labels',
                        help='path to the file containing the train labels.',
                        default="gs://uga-dsp/project1/files/y_train.txt",
                        type=str)
    parser.add_argument('--test_labels',
                        help='path to the file containing the test labels.',
                        default="gs://uga-dsp/project1/files/y_train.txt",
                        type=str)
    parser.add_argument(
        '--outfile',
        help='path to the output file containing labels for final test set.',
        default="gs://p1-models/RF_Large_Predictions.csv",
        type=str)
    parser.add_argument('--model_path',
                        help='path to the folder for saving the final model.',
                        default="gs://models/",
                        type=str)
    parser.add_argument('--n_parts',
                        help='an integer specifying the number of partitions.',
                        default=50,
                        type=int)
    parser.add_argument('--mem_lim',
                        help='a string specifying the memory limit.',
                        default='10G',
                        type=str)
    parser.add_argument(
        '--max_depth',
        help='maximum depth of the tree in Random Forest Classifier.',
        default=7,
        type=int)
    parser.add_argument(
        '--classifier',
        choices=['lr', 'nb', 'rf'],
        help='classifier algorithm to be used for the classification task.',
        default='rf',
        type=str)
    args = parser.parse_args()

    # initializing the variables
    print("Initializing the variables....")
    asm_path = args.asm_path
    bytes_path = args.bytes_path
    train_files = args.train_files
    test_files = args.test_files
    train_labels = args.train_labels
    test_labels = args.test_labels
    outfile = args.outfile
    model_path = args.model_path
    n_parts = args.n_parts
    memory_limit = args.mem_lim
    max_depth = args.max_depth
    classifier = args.classifier

    sc = spark_session_setup(memory_limit=memory_limit)

    # loading the dataset
    print("loading the dataset...")
    train_df, test_df = load_dataset(sc,
                                     asm_path=asm_path,
                                     bytes_path=bytes_path,
                                     X_train=train_files,
                                     y_train=train_labels,
                                     X_test=test_files,
                                     y_test=test_labels,
                                     n_parts=n_parts)

    # building the model
    print("building the model...")
    stages = build_pipeline(classifier=classifier, max_depth=max_depth)
    pipeline = Pipeline(stages=stages)
    model = pipeline.fit(train_df)

    # saving the model and writing the predictions into the output file
    if model_path:
        model.save(model_path)
    print("generatign the predictions...")
    predictions = model.transform(test_df)
    write_to_file(predictions, outfile)
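
write_to_file and the other helpers here (spark_session_setup, load_dataset, build_pipeline) belong to the surrounding project and are not shown. A minimal sketch of the prediction writer, assuming the transformed DataFrame carries the standard Spark ML 'prediction' column and that writing a CSV directory at outfile is acceptable:

def write_to_file(predictions, outfile):
    # hypothetical sketch of the prediction writer used above
    (predictions
     .select("prediction")
     .coalesce(1)                      # one output part for convenience
     .write.mode("overwrite")
     .csv(outfile, header=True))
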
Example #10
def main():
    random.seed(1000)
    dense_G = nx.read_graphml("graphs_2d/dense_graph.graphml")
    shallow_G = nx.read_graphml("graphs_2d/shallow_graph.graphml")
    start_nodes = []
    goal_nodes = []

    occ_grid = []
    all_path_nodes = []
    no_env = 1
    no_pp = 5
    no_paths = 5
    knn = 10

    for n in range(no_env):
        print("----------------------------------------------env_no = ", n)
        dense_G1 = dense_G.copy()
        shallow_G1 = shallow_G.copy()
        obstacles = get_obstacles_posns()
        print("obstacles = ", obstacles)
        occ_grid1 = get_occ_grid(obstacles)
        dense_G1 = helper.remove_invalid_edges(dense_G1, obstacles)

        for p in range(no_pp):
            print("pp_no = ", p)
            flag = False
            while (flag == False):
                start_n = random.choice(list(dense_G1.nodes()))
                goal_n = random.choice(list(dense_G1.nodes()))

                start = state_to_numpy(dense_G1.node[start_n]['state'])
                goal = state_to_numpy(dense_G1.node[goal_n]['state'])
                if (not is_trivial(start, goal, obstacles)):
                    if (path_exists(start_n, goal_n, dense_G1)):
                        shallow_G1.add_node(
                            start_n, state=dense_G1.node[start_n]['state'])
                        shallow_G1.add_node(
                            goal_n, state=dense_G1.node[goal_n]['state'])

                        shallow_G1 = connect_knn(shallow_G1, dense_G1, start_n,
                                                 goal_n, knn)
                        if (not path_exists(shallow_G1, 'o' + start_n,
                                            'o' + goal_n)):
                            start_nodes.append(start_n)
                            goal_nodes.append(goal_n)
                            occ_grid.append(occ_grid1)
                            # print("occ_grid = ", occ_grid1.reshape(20,20))
                            # plot_occ_grid(occ_grid1.reshape(20,20))
                            # return
                            flag = True
                            path_nodes = get_path_nodes(
                                shallow_G1, dense_G1, start_n, goal_n,
                                obstacles)
                            all_path_nodes.append(path_nodes)
                            print("path_nodes = ", path_nodes)
                            continue

    occ_grid = np.array(occ_grid)
    print("occ_grid.shape = ", occ_grid.shape)
    np.savetxt("dataset/start_nodes.txt",
               np.array(start_nodes),
               delimiter=" ",
               fmt="%s")
    np.savetxt("dataset/goal_nodes.txt",
               np.array(goal_nodes),
               delimiter=" ",
               fmt="%s")
    np.savetxt("dataset/occ_grid.txt",
               np.array(occ_grid),
               delimiter=" ",
               fmt="%s")
    helper.write_to_file("dataset", all_path_nodes)
Example #11
                                    best_choice['Stock']].tolist()[0]
            stocks_bought.at[i, 'volume'] += xvol
            stocks_bought.at[i, 'pricepaid'] = pricepaid
        else:
            stocks_bought = stocks_bought.append(
                {
                    'stocks': best_choice['Stock'],
                    'volume': xvol,
                    'pricepaid': pricepaid
                },
                ignore_index=True)

    N += 1
    print(N, "----", profit['profit'].iloc[-1], "----")
    # update the balance at the end of the iteration
    balanceSum = update_balance(df, stocks_bought, datetrack,
                                profit['profit'].iloc[-1])
    bal = bal.append({
        'date': datetrack,
        'balance': balanceSum
    },
                     ignore_index=True)
    if break_flag == True:
        break

    daterange = pd.date_range(
        start=datetrack, periods=T)  # need to return an updated datetrack here

write_to_file(N, moves_history)
plot_profit_balance(profit, bal)
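
One note on the fragment above: DataFrame.append was deprecated in pandas 1.4 and removed in pandas 2.0, so the balance bookkeeping needs pd.concat on current versions. A drop-in sketch using the same column names:

import pandas as pd

# equivalent of: bal = bal.append({'date': datetrack, 'balance': balanceSum}, ignore_index=True)
new_row = pd.DataFrame([{'date': datetrack, 'balance': balanceSum}])
bal = pd.concat([bal, new_row], ignore_index=True)
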