def create_index(self, field_to_index: str) -> None:
    path = f"{DB_ROOT}/{self.name}"
    index = {}
    # Scan every data file and map each record's value for the indexed
    # field to the number of the file that holds it.
    for i in range(1, self.meta_data['files_num'] + 1):
        data = files.load(f"{path}/{i}.json")
        for k in data.keys():
            record.add(index, data[k][field_to_index], i)
    files.dump(index, f"{path}/{field_to_index}_index.json")

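# `files.load` and `files.dump` are external helpers not shown in this file.
# From the calls above they look like thin JSON wrappers; the sketch below
# is an assumption inferred from usage, not the real module:
import json

def files_load_sketch(file_path):
    """Hypothetical stand-in for files.load: parse a JSON file into a dict."""
    with open(file_path) as f:
        return json.load(f)

def files_dump_sketch(obj, file_path):
    """Hypothetical stand-in for files.dump: overwrite file_path with obj as JSON."""
    with open(file_path, "w") as f:
        json.dump(obj, f)
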
def update_record(self, key: Any, values: Dict[str, Any]) -> None:
    path = f"{DB_ROOT}/{self.name}"
    meta_data = files.load(f"{path}/meta_data.json")
    # The key index tells us which data file holds the record.
    i = record.search_index(f"{path}/{meta_data['key']}_index.json", key)[0]
    data = files.load(f"{path}/{i}.json")
    # Overwrite only the fields that were passed in.
    data[str(key)].update(values)
    files.dump(data, f"{path}/{i}.json")

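# `record.add` and `record.search_index` are likewise external. The way they
# are called implies the index file maps str(field_value) -> list of
# data-file numbers. Hypothetical stand-ins under that assumption:
import json

def record_add_sketch(index, value, file_num):
    """Hypothetical record.add: note that `value` occurs in data file `file_num`."""
    index.setdefault(str(value), []).append(file_num)

def record_search_index_sketch(index_path, key):
    """Hypothetical record.search_index: list the data files holding `key`."""
    with open(index_path) as f:
        index = json.load(f)
    return index.get(str(key), [])
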
def proceed(N, t1, t2, opcje_uslug):
    # Default to a three-year window when only one endpoint is given;
    # at least one of t1/t2 must be set.
    if t1 is None:
        t1 = t2 - datetime.timedelta(days=365 * 3)
    if t2 is None:
        t2 = t1 + datetime.timedelta(days=365 * 3)

    print("### Starting to generate ###\r\n")
    start_time = time.time()
    lists = generator.generuj(N=int(N), t1=t1, t2=t2, opcje_uslug=opcje_uslug)
    elapsed_time = time.time() - start_time
    print("### Generating has been finished ###")
    print(f"Total time to complete: {elapsed_time}s\r\n")

    saved = {}
    print("### Saving objects to .bulk files ###")
    for key, val in lists.items():
        # `margin` tracks how many objects of each kind were saved in
        # earlier runs, so only the new tail of each list is dumped.
        n = margin[key]
        saved[key] = files.dump(key, list(val.values())[n:])
        margin[key] = len(val)
    print("### Saving objects has been finished ###")

    print("### Bulk inserting to DB ###\r\n")
    for key, filepath in saved.items():
        print(f"{key}... ", end='', flush=True)
        start_time = time.time()
        conn.execute_bulk(key, filepath)
        elapsed_time = time.time() - start_time
        print(f"Finished; time: {elapsed_time}")
    print("### Bulk insertion has been finished ###")

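# Note that files.dump is called here as files.dump(key, objects) and its
# return value is used as a file path, so it is a different helper from the
# (obj, path) JSON dump used by the DBTable methods above. A hedged usage
# example, assuming `margin` is pre-seeded with a count per object kind and
# `conn`/`generator` are configured at module level; `opcje_uslug` is Polish
# for "service options" and its expected shape is not shown in this file:
if __name__ == "__main__":
    t2 = datetime.datetime.now()
    # t1 stays None, so proceed() backfills a three-year window before t2.
    proceed(N=1000, t1=None, t2=t2, opcje_uslug={})
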
def create_table(self, table_name: str, fields: List[db_api.DBField],
                 key_field_name: str) -> DBTable:
    fields_names = [f.name for f in fields]
    if key_field_name not in fields_names:
        raise ValueError(f"key field '{key_field_name}' is not among the table's fields")
    os.mkdir(f"{DB_ROOT}/{table_name}")
    meta_data = {
        "must": fields_names,
        "key": key_field_name,
        "count": 0,
        "files_num": 1
    }
    with (DB_ROOT / f"{table_name}/meta_data.json").open("w") as the_file:
        json.dump(meta_data, the_file)
    # Start the table with a single empty data file and an index on the key.
    files.dump({}, f"{DB_ROOT}/{table_name}/1.json")
    table = DBTable(table_name, fields, key_field_name, meta_data)
    table.create_index(key_field_name)
    return table

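# db_api.DBField is imported from elsewhere; the only attribute this file
# reads is `.name`. A hypothetical minimal shape, for illustration only
# (the real class may well carry a field type and other metadata):
from dataclasses import dataclass

@dataclass
class DBFieldSketch:
    name: str  # the one attribute create_table actually uses
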
def insert_record(self, values: Dict[str, Any]) -> None:
    path = f"{DB_ROOT}/{self.name}"
    data = files.load(f"{path}/{self.meta_data['files_num']}.json")
    index = files.load(f"{path}/{self.meta_data['key']}_index.json")
    if index.get(str(values[self.meta_data["key"]])) is not None:
        raise ValueError(f"duplicate key: {values[self.meta_data['key']]}")
    if len(data) >= 1000:
        # The current data file is full: roll over to a new file, so the
        # index entry added below points at the file that holds the record.
        self.meta_data['files_num'] += 1
        files.dump({values[self.meta_data["key"]]: values},
                   f"{path}/{self.meta_data['files_num']}.json")
    else:
        data[values[self.meta_data["key"]]] = values
        files.dump(data, f"{path}/{self.meta_data['files_num']}.json")
    record.add(index, values[self.meta_data["key"]],
               self.meta_data['files_num'])
    files.dump(index, f"{path}/{self.meta_data['key']}_index.json")
    self.meta_data["count"] += 1
    files.dump(self.meta_data, f"{path}/meta_data.json")

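# Sharding behaviour in short, with a hypothetical `table` object and an
# integer "id" key: each data file holds at most 1000 records, so once the
# current file is full the next insert opens a new one and the key index
# points straight at it.
#
#   table.insert_record({"id": 1001, "name": "new"})
#   # -> writes {DB_ROOT}/{name}/2.json and maps "1001" to file 2
#   table.insert_record({"id": 1001, "name": "dup"})
#   # -> raises ValueError: "1001" is already in the key index
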
def delete_record(self, key: Any) -> None:
    path = f"{DB_ROOT}/{self.name}"
    index = files.load(f"{path}/{self.meta_data['key']}_index.json")
    if index.get(str(key)) is None or len(index[str(key)]) == 0:
        raise ValueError(f"no record with key: {key}")
    # The index maps the key to the list of data files holding it.
    file_num = index[str(key)][0]
    data = files.load(f"{path}/{file_num}.json")
    data.pop(str(key))
    files.dump(data, f"{path}/{file_num}.json")
    self.meta_data["count"] -= 1
    files.dump(self.meta_data, f"{path}/meta_data.json")
    index.pop(str(key))
    files.dump(index, f"{path}/{self.meta_data['key']}_index.json")

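# An end-to-end sketch tying the methods together. The constructor names
# and field literals below are made up for illustration; only the method
# calls mirror this file:
#
#   users = db.create_table(
#       "users", [db_api.DBField("id"), db_api.DBField("name")], "id")
#   users.insert_record({"id": 1, "name": "Ada"})
#   users.update_record(1, {"name": "Ada Lovelace"})
#   users.delete_record(1)   # removes the record and its index entry
#   users.delete_record(1)   # raises ValueError: key no longer indexed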