Example 1
    def __init__(self, name, num_columns, key):
        """Set up table metadata, page directories, and start the merge worker."""
        # Identity and schema
        self.name = name
        self.key = key
        self.num_columns = num_columns

        # Storage layer
        self.bufferpool = BufferPool(self.num_columns)
        self.basePage_dir = {}
        # Maps tailRID -> tail location so a tail record can be found directly
        self.tailPage_dir = {}
        self.tailRIDTOBaseRID = {}
        self.index = Index(self)
        self.num_PageRanges = 1

        # RID 0 marks a deleted record, so both counters begin at 1
        self.baseRID = 1
        self.tailRID = 1

        # Merge bookkeeping
        self.mergeQ = []
        self.mergedCount = 0

        # Background merge worker; daemon so it dies with the process
        merge_worker = threading.Thread(target=self.merge)
        merge_worker.daemon = True
        merge_worker.start()
Example 2
 def __init__(self, name, num_columns, key, bufferpool):
     """Initialize table state, its directories, and lock bookkeeping."""
     self.name = name
     self.key = key
     self.num_columns = num_columns
     # Four bookkeeping columns (RID, Indirection, Schema, Timestamp) ride along
     self.total_columns = num_columns + 4
     self.page_directory = {}   # RID -> (pageId, offset)
     self.index_directory = {}  # key -> RID
     self.index = Index(self)
     # Monotonic RID source; avoids handing out duplicate record ids
     self.rid_counter = 0
     # pageID currently being appended to
     self.num_page = 2
     # Project-wide configuration constants bundled into one object
     self.config = init()
     # Page ids of the internal / external halves of base records
     self.base_pages_internal = [1]
     self.base_pages_external = [2]
     # Page ids holding tail records
     self.tail_pages = [0]
     # The bufferpool object is shared by every table in the database
     self.bufferpool = bufferpool
     # Per-record lock maps
     self.shared_locks = {}
     self.exclusive_locks = {}
Example 3
   def open(self, path):
       """Open the database rooted at *path*, restoring every persisted table.

       If the directory does not exist it is created and the call returns
       (fresh database).  Otherwise each subdirectory is treated as a table:
       its pickle is loaded, transient members (index, bufferpool, lock) are
       rebuilt, and the per-column index files are replayed into a fresh
       Index before the table is registered.
       """
       self.path = path
       if not os.path.isdir(path):
           os.mkdir(path)
           return
       for file in os.listdir(path):
           t_path = self.path + '/' + str(file) + '/' + str(file) + '.pkl'
           # 'rb' suffices (read-only); the context manager closes the handle
           # even if unpickling raises — the original leaked it on error.
           with open(t_path, 'rb') as f:
               table = pickle.load(f)
           # Rebuild members that are not (or cannot be) pickled.
           table.index = Index(table)
           table.buffer = Bufferpool(table)
           table.sem = threading.RLock()
           # Replay each indexed column's persisted "key_rid_rid_..._" lines.
           for i in range(table.num_index):
               index_path = self.path + '/' + file + '/table_index_col' + str(i) + '.txt'
               with open(index_path, 'r') as indexObj:
                   for line in indexObj.readlines():
                       line = line.split('_')
                       for rid in line[1:-1]:
                           table.index.insert(i, int(line[0]), rid)
           self.append_table(table)
           self.num_table += 1
Example 4
 def __init__(self, name, num_columns, key):
     """Create an empty table: metadata, page directory, and index.

     The trailing `pass` in the original was dead code after real
     statements and has been removed.
     """
     self.name = name
     self.key = key
     self.num_columns = num_columns
     self.page_directory = {}  # RID -> record location
     self.index = Index(self)
 def __init__(self, name, num_columns, key):
     """Initialize table metadata, the first page range, and RID counters.

     The trailing `pass` in the original was dead code after real
     statements and has been removed.
     """
     self.name = name
     self.key = key
     self.num_columns = num_columns
     self.page_directory = {}
     self.index = Index(self)
     # Start with a single page range; more are appended as data grows
     self.pageRanges = [PageRange(self.num_columns)]
     self.keyToBaseRID = {}  # primary key -> base record RID
     # RID 0 is reserved for deleted records, so counters begin at 1
     self.baseRID = 1
     self.tailRID = 1
Example 6 (the header for example 5 is missing above)
 def __init__(self, name, num_columns, key):
     """Build the table, its index, its buffer-pool range, and a seed record."""
     self.name = name
     self.key = key
     self.num_columns = num_columns
     # Replace with index, and all references inside table and query with index API
     self.page_directory = {}
     self.index = Index(self, self.num_columns)
     self.buffer_pool_range = BufferPoolRange(BUFFER_POOL_SIZE_RANGE, num_columns)
     self.curr_page_range = 0
     # Seed the table with an all-zero record
     self.insertRecord([0] * num_columns)
Example 7
    def __init__(self, name, num_columns, key):
        """Set up table metadata, the bufferpool, and merge/deallocate queues."""
        self.name = name
        self.key = key
        self.num_columns = num_columns
        self.bufferpool = BufferPool(self.num_columns)
        # tailRID -> tail location, so a tail record can be located directly
        self.tailPage_lib = {}
        self.index = Index(self)
        self.num_PageRanges = 1

        # RID 0 flags a deleted record, so live RIDs start at 1
        self.baseRID = 1
        self.tailRID = 1

        # Thread-safe queues feeding merge work and page deallocation
        self.mergeQ = queue.Queue()
        self.deallocateQ = queue.Queue()
        '''
Example 8
 def __init__(self, name, num_columns, Table_key, path):
     """Initialize table storage state and kick off the merge machinery."""
     self.path = path
     self.name = name
     self.Table_key = Table_key
     self.num_columns = num_columns
     self.page_directory = {}  # RID -> record obj
     # Unmerged originals vs. the in-progress merged copy of base pages
     self.origin_base_page_memory = []
     self.after_merge_base_page_memory = {}
     self.buffer = Bufferpool(self)
     self.index = Index(self)
     # Counters: page ranges, next free base RID, next free tail RID
     self.prange_num = 0
     self.free_brid = 0
     self.free_trid = 0
     self.rid_list = []
     self.key_list = set()
     self.rif_trash = []
     # RIDs waiting to be merged, plus how many merges have run so far
     self.merge_waiting_set = set()
     self.merge_times = 0
     self.num_index = 0
     self.sem = threading.RLock()
     self.merge_start()
Example 9
from template.db import Database
from template.query import Query
from template.config import init
from template.index import Index

from random import choice, randint, sample, seed
init()

db = Database()
db.open('./ECS165')

# Grades table: 5 columns, primary key in column 0
grades_table = db.create_table('Grades', 5, 0)
index = Index(grades_table)
query = Query(grades_table)

# Repopulate with deterministic pseudo-random data (fixed seed)
records = {}
seed(3562901)
for i in range(1000):
    key = 92106429 + i
    # Key column followed by four random grade columns; randint is called in
    # the same order as before, so the seeded values are unchanged.
    records[key] = [key] + [randint(0, 20) for _ in range(4)]
    query.insert(*records[key])
# sorted() iterates the dict's keys directly — no list(...keys()) copy needed
keys = sorted(records)
print("Insert finished")