def insert_record(self, values: Dict[str, Any]) -> None:
    """Insert *values* as a new record keyed by the table's primary key.

    Raises:
        ValueError: if a record with the same primary key already exists.
    """
    data = json_func.read_from_json(f"{db_api.DB_ROOT}/db.json")
    primary_key = data[self.name]["key_field_name"]
    try:
        self.get_record(values[primary_key])
    except ValueError:
        pass  # not found -> free to insert
    else:
        # Record already exists; refuse the duplicate.
        # (The original tracked this with a `flag` sentinel and raised at
        # the very end; raising here is equivalent and clearer.)
        raise ValueError
    new_record = {
        values[primary_key]:
            {k: str(v) for k, v in values.items() if k != primary_key}
    }
    meta = data[self.name]  # alias; mutations flow back into `data`
    # Every 10 records, roll over to a fresh data file.
    if meta["num_of_lines"] % 10 == 0 and meta["num_of_lines"] != 0:
        meta["num_of_files"] += 1
        json_func.write_to_json(
            f"{db_api.DB_ROOT}/{self.name}{meta['num_of_files']}.json",
            new_record)
    else:
        # NOTE(review): hard-coded "db_files/" here vs. db_api.DB_ROOT
        # above — assumed equivalent; confirm DB_ROOT == "db_files".
        json_func.add_line_to_json(
            f"db_files/{self.name}{meta['num_of_files']}.json",
            values[primary_key], new_record[values[primary_key]])
    meta["num_of_lines"] += 1
    json_func.write_to_json(f"{db_api.DB_ROOT}/db.json", data)
    update_insert_indexes(
        data, self.name, values,
        f"db_files/{self.name}{meta['num_of_files']}.json")
def insert_new_record(values, primary_key, name, db_meta_data):
    """Write a new record for table *name* and bump its metadata counters.

    Raises:
        ValueError: if the next rollover data file unexpectedly exists.
    """
    key_value = values[primary_key]
    record_body = {field: str(val)
                   for field, val in values.items() if field != primary_key}
    new_record = {key_value: record_body}
    table_meta = db_meta_data[name]
    lines = table_meta['num_of_lines']
    if lines and lines % 10 == 0:
        # Current data file is full: start a new one.
        next_path = f"db_files/{name}{table_meta['num_of_files'] + 1}.json"
        if file_exists(next_path):
            raise ValueError
        table_meta['num_of_files'] += 1
        write_to_json(f"db_files/{name}{table_meta['num_of_files']}.json",
                      new_record)
    else:
        add_to_json(f"db_files/{name}{table_meta['num_of_files']}.json",
                    new_record)
    table_meta['num_of_lines'] += 1
    update_db_meta_data(db_meta_data)
def update_insert_indexes(data, name, values, path):
    """Point every secondary index of table *name* at *path* for the record
    described by *values*.
    """
    for field in data[name]["indexes"]:
        index_file = f'{db_api.DB_ROOT}/{name}IndexBy{field}.db'
        index_map = json_func.read_from_json(index_file)
        index_map[str(values[field])] = path
        json_func.write_to_json(index_file, index_map)
def delete_records(self, criteria: List[db_api.SelectionCriteria]) -> None:
    """Delete every record that satisfies ALL of *criteria* across all of
    the table's data files, keeping metadata counters in sync.
    """
    data = json_func.read_from_json(f"{db_api.DB_ROOT}/db.json")
    num_of_files = data[self.name]["num_of_files"]
    primary_key = data[self.name]["key_field_name"]
    for file_num in range(num_of_files):
        path = f"{db_api.DB_ROOT}/{self.name}{file_num + 1}.json"
        file_data = json_func.read_from_json(path)
        keys_to_delete = []
        for key, value in file_data.items():
            # A record is deleted only when every criterion holds.  The
            # primary key is the record's dict key; other fields live in
            # the record body.  Comparisons coerce both sides to int.
            # (The original guarded each test with `key in file_data.keys()`,
            # which is always true while iterating file_data.items().)
            if all(
                ops[c.operator](int(key), int(c.value))
                if c.field_name == primary_key
                else ops[c.operator](int(value[c.field_name]), int(c.value))
                for c in criteria
            ):
                keys_to_delete.append(key)
        for key in keys_to_delete:
            del file_data[key]
            data[self.name]["num_of_lines"] -= 1
            # NOTE(review): this passes the record key where
            # update_delete_indexes expects an index *field name* —
            # looks inconsistent; confirm intent.
            update_delete_indexes(data, self.name, key)
        json_func.write_to_json(path, file_data)
    json_func.write_to_json(f"{db_api.DB_ROOT}/db.json", data)
def update_delete_indexes(data, name, index):
    """Remove *index*'s entry from its index file, provided *index* names an
    indexed field of table *name*.

    NOTE(review): the entry deleted is keyed by str(index) — the field name
    itself — yet some callers pass a record key here; confirm which is
    intended.
    """
    if index not in data[name]["indexes"]:
        return
    index_path = f'{db_api.DB_ROOT}/{name}IndexBy{index}.db'
    index_map = json_func.read_from_json(index_path)
    del index_map[str(index)]
    json_func.write_to_json(index_path, index_map)
def create_index(self, field_to_index: str) -> None:
    """Build an index file mapping each value of *field_to_index* to the
    data file that holds its record.

    Prints a message and returns without changes when the index exists.
    """
    data = json_func.read_from_json(f"{db_api.DB_ROOT}/db.json")
    if field_to_index in data[self.name]["indexes"]:
        print("index exist")
        return
    data[self.name]["indexes"].append(field_to_index)
    json_func.write_to_json(f"{db_api.DB_ROOT}/db.json", data)
    num_of_files = data[self.name]["num_of_files"]
    primary_key = data[self.name]["key_field_name"]
    index = {}
    for file_num in range(num_of_files):
        path = f"{db_api.DB_ROOT}/{self.name}{file_num + 1}.json"
        # Context manager ensures the handle is closed — the original
        # called open() without ever closing the file.
        with open(path) as the_file:
            json_data = json.load(the_file)
        if primary_key == field_to_index:
            # The primary key is the record's own dict key.
            for k in json_data:
                index[k] = path
        else:
            for v in json_data.values():
                index[v[field_to_index]] = path
    json_func.write_to_json(
        f'{db_api.DB_ROOT}/{self.name}IndexBy{field_to_index}.db', index)
def delete_table(self, table_name: str) -> None:
    """Remove a table: its metadata entry, all data files, and the
    database-wide table count.
    """
    num_of_files = json_func.delete_table_from_json(table_name)
    for file_num in range(num_of_files):
        os.remove(f"db_files/{table_name}{file_num + 1}.json")
    # Plain string literals — the original used f-strings with no
    # placeholders here.
    data = json_func.read_from_json("db_files/db.json")
    data["num_of_tables"] -= 1
    json_func.write_to_json("db_files/db.json", data)
def delete_record(self, key: Any) -> None:
    """Delete the record whose primary key equals *key*.

    Raises:
        ValueError: if no such record exists in the located data file.
    """
    data = json_func.read_from_json(f"{db_api.DB_ROOT}/db.json")
    primary_key = data[self.name]["key_field_name"]
    index_path = f'{db_api.DB_ROOT}/{self.name}IndexBy{primary_key}.db'
    json_path = get_indexes(index_path, key)
    if json_func.delete_if_apear(json_path, key) == 0:
        raise ValueError
    data[self.name]["num_of_lines"] -= 1
    update_delete_indexes(data, self.name, key)
    json_func.write_to_json(f"{db_api.DB_ROOT}/db.json", data)
def update_record(self, key: Any, values: Dict[str, Any]) -> None:
    """Overwrite fields of the record whose primary key equals *key*,
    locating its data file via the primary-key index.

    Raises:
        ValueError: if the record is not present in the located file.
    """
    data = json_func.read_from_json(f"{db_api.DB_ROOT}/db.json")
    primary_key = data[self.name]["key_field_name"]
    json_path = get_indexes(
        f'{db_api.DB_ROOT}/{self.name}IndexBy{primary_key}.db', key)
    json_data = json_func.read_from_json(json_path)
    record_key = str(key)
    # Membership test replaces the original's manual scan, whose inner
    # loop also shadowed the `key` parameter.
    if record_key not in json_data:
        raise ValueError
    for field, val in values.items():
        json_data[record_key][field] = val
    json_func.write_to_json(json_path, json_data)
def del_if_record_appear(path, key):
    """Delete the record keyed by *key* from the JSON file at *path*.

    Returns:
        True when the record was found and removed, False otherwise.
    """
    file_data = read_from_json(path)
    record_key = str(key)
    # Dict membership test replaces the original's hand-rolled scan
    # over .keys().
    if record_key not in file_data:
        return False
    del file_data[record_key]
    write_to_json(path, file_data)
    return True
def update_record(self, key: Any, values: Dict[str, Any]) -> None:
    """Scan the table's data files and overwrite fields of record *key*.

    Raises:
        ValueError: when no data file contains the record.
    """
    db_meta_data = read_db_meta_data()
    num_of_files = db_meta_data[self.name]['num_of_files']
    record_key = str(key)
    for file_num in range(num_of_files):
        path = f"db_files/{self.name}{file_num + 1}.json"
        file_data = read_from_json(path)
        # Direct membership test + dict.update instead of the original's
        # manual scan over every key.
        if record_key in file_data:
            file_data[record_key].update(values)
            write_to_json(path, file_data)
            return None
    raise ValueError
def init_tree(trie, data):
    """Populate *trie* and *data* from the bundled documentation texts,
    then persist both as JSON ("trie.json" / "data.json").
    """
    print("preparing your sources, please wait...")
    docs_dir = ('technology_texts/python-3.8.4-docs-text/'
                'python-3.8.4-docs-text/c-api')
    # NOTE(review): only the first file is processed ([:1]) — assumed to be
    # a deliberate start-up-time limit; confirm.
    for f in listdir(docs_dir)[:1]:
        path = f"{docs_dir}/{f}"
        index = 1
        # Context manager closes the handle — the original leaked it.
        with open(path, "r", encoding="utf8") as my_file:
            for line, sentence in enumerate(my_file):
                # NOTE(review): compares against a single space, not a
                # blank/whitespace-only line — confirm this is intended.
                if sentence != " ":
                    data[index] = auto_complete_data(sentence,
                                                     f"{path}/{line}")
                    insert_to_trie(data, trie, normal_form(sentence), index)
                    index += 1
    write_to_json("trie.json", trie)
    write_to_json("data.json", data)
def create_table(self, table_name: str, fields: List[db_api.DBField],
                 key_field_name: str, DB_BACKUP_ROOT=None) -> DBTable:
    """Register a new table, create its first (empty) data file, and index
    its key field.

    Raises:
        ValueError: if *key_field_name* is not the name of any field.
    """
    if key_field_name not in [field.name for field in fields]:
        raise ValueError
    table_meta = {
        "fields": convert_from_dbfields(fields),
        "key_field_name": key_field_name,
        "num_of_lines": 0,
        "num_of_files": 1,
        "indexes": []
    }
    json_func.write_to_json(f"db_files/{table_name}1.json", {})
    json_func.add_table_to_json("db_files/db.json", table_name, table_meta)
    table = DBTable(table_name, fields, key_field_name)
    # The key field is guaranteed present (validated above), so index it
    # directly instead of scanning `fields` looking for it.
    table.create_index(key_field_name)
    return table
def delete_records(self, criteria: List[SelectionCriteria]) -> None:
    """Remove every record matching all of *criteria* and write the
    updated metadata back to disk.
    """
    db_meta_data = read_db_meta_data()
    table_meta = db_meta_data[self.name]
    primary_key = table_meta['key_field_name']
    for file_num in range(1, table_meta['num_of_files'] + 1):
        path = f"db_files/{self.name}{file_num}.json"
        file_data = read_from_json(path)
        doomed = [
            key for key, value in file_data.items()
            if meets_all_the_criteria(file_data, key, value, primary_key,
                                      criteria)
        ]
        for key in doomed:
            del file_data[key]
            table_meta['num_of_lines'] -= 1
        write_to_json(path, file_data)
    update_db_meta_data(db_meta_data)
def __init__(self):
    """Ensure the database metadata file exists, creating it when missing."""
    if file_exists("db_files/db.json"):
        return
    write_to_json("db_files/db.json", {"num_of_tables": 0})
def update_db_meta_data(db_meta_data):
    """Persist the in-memory metadata dict back to the db.json file."""
    meta_path = "db_files/db.json"
    write_to_json(meta_path, db_meta_data)
def __init__(self):
    """Create db_files/db.json with a zero table count unless it exists."""
    meta_path = "db_files/db.json"
    if os.path.isfile(meta_path):
        return
    json_func.write_to_json(meta_path, {"num_of_tables": 0})