def get_record(self, key: Any) -> Dict[str, Any]:
    path = f"{DB_ROOT}/{self.name}"
    meta_data = files.load(f"{path}/meta_data.json")
    i = record.search_index(f"{path}/{meta_data['key']}_index.json", key)[0]
    data = files.load(f"{path}/{i}.json")
    return data.get(str(key))
def update_record(self, key: Any, values: Dict[str, Any]) -> None:
    path = f"{DB_ROOT}/{self.name}"
    meta_data = files.load(f"{path}/meta_data.json")
    i = record.search_index(f"{path}/{meta_data['key']}_index.json", key)[0]
    data = files.load(f"{path}/{i}.json")
    data[str(key)].update(values)
    files.dump(data, f"{path}/{i}.json")
def delete_record(self, key: Any) -> None:
    path = f"{DB_ROOT}/{self.name}"
    index = files.load(f"{path}/{self.meta_data['key']}_index.json")
    if index.get(str(key)) is None or len(index[str(key)]) == 0:
        raise ValueError(f"key not found: {key}")
    # Remove the record from the data file it lives in, then update the
    # record count and the key index.
    file_num = index[str(key)][0]
    data = files.load(f"{path}/{file_num}.json")
    data.pop(str(key))
    files.dump(data, f"{path}/{file_num}.json")
    self.meta_data["count"] -= 1
    files.dump(self.meta_data, f"{path}/meta_data.json")
    index.pop(str(key))
    files.dump(index, f"{path}/{self.meta_data['key']}_index.json")
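# A hypothetical usage sketch for the three record operations above; the
# `students` table, its key value, and the field values are illustrative
# assumptions, not part of the original code.
students.update_record(42, {"grade": "A"})   # merge new values into record 42
print(students.get_record(42))               # -> the full record dict for key 42
students.delete_record(42)                   # drop the record, its index entry, and the count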
def lazy_load_dict(dict_path, dictionary_name, source_path, mapper=lambda x: x):
    # Load the cached dictionary if it exists; otherwise build it from the
    # source and cache it for next time.
    if file_exists(dict_path, dictionary_name):
        dictionary = load(dict_path, dictionary_name)
    else:
        dictionary = create_dict(source_path)
        save(dictionary, dict_path, dictionary_name)
    return {mapper(k): v for k, v in dictionary.items()}
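# Illustrative call of lazy_load_dict, assuming the load/save helpers persist
# JSON (where keys come back as strings): the first call builds the dict from
# the source and caches it; later calls reuse the cache. Passing mapper=int
# restores integer keys. The paths and names here are made up for the example.
scores = lazy_load_dict("cache", "scores", "data/scores.csv", mapper=int)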
def classify(self, image):
    image = OpenCVImages.toString(image)
    # Bind the result to `text` rather than shadowing the built-in `str`.
    text = self.handle.recognizeFromImageBuffer(image, 5, 0.1)
    if text is not None:
        screenReaderHandle = files.load("screenreader", "./screenreader/screenreader.py")
        screenReaderHandle.say(text)
        print(text)
def load_level_info(default):
    try:
        return load(level_info_path, level_info_file)
    except Exception:
        log.warn(f"Couldn't load level file, returning default level {default}")
        return {"level": default}
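# Example call (hypothetical): if the level file is missing or unreadable,
# the caller gets a minimal dict with the default level instead.
info = load_level_info(default=1)
current_level = info["level"]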
def delete_records(self, criteria: List[db_api.SelectionCriteria]) -> None:
    path = f"{DB_ROOT}/{self.name}"
    for i in range(1, self.meta_data['files_num'] + 1):
        data = files.load(f"{path}/{i}.json")
        # Iterate over a snapshot of the keys, since delete_record rewrites
        # the underlying files as we go.
        for k in list(data.keys()):
            if record.check(data[k], criteria):
                self.delete_record(k)
def create_index(self, field_to_index: str) -> None:
    path = f"{DB_ROOT}/{self.name}"
    index = {}
    for i in range(1, self.meta_data['files_num'] + 1):
        data = files.load(f"{path}/{i}.json")
        for k in data.keys():
            record.add(index, data[k][field_to_index], i)
    files.dump(index, f"{path}/{field_to_index}_index.json")
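# Sketch of the resulting index shape, inferred from how record.add and
# record.search_index are used in this file: each field value maps to the
# numbers of the data files containing a matching record, e.g.
#   {"Smith": [1, 3], "Jones": [2]}
# The table instance and field name below are illustrative.
students.create_index("last_name")   # writes last_name_index.json under the table dir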
def __init__(self):
    threading.Thread.__init__(self)
    self.path = None
    self.capture = False
    self.recognize = False
    self.handle = files.load("tensor", "./tensorflow/classify.py")
    self.panel = None
def onKey(self, event):
    keyCode = event.GetKeyCode()
    if keyCode == wx.WXK_SPACE:
        print("you pressed the spacebar!")
        handle = files.load("screenreader", "./screenreader/screenreader.py")
        handle.say("Hello")
    elif keyCode == wx.WXK_ESCAPE:
        self.ShowFullScreen(False)
    else:
        event.Skip()
def query_table(self, criteria: List[db_api.SelectionCriteria]) \
        -> List[Dict[str, Any]]:
    path = f"{DB_ROOT}/{self.name}"
    # Collect matches into a list so the return value matches the annotation.
    res = []
    for i in range(1, self.meta_data['files_num'] + 1):
        data = files.load(f"{path}/{i}.json")
        for k in data.keys():
            if record.check(data[k], criteria):
                res.append(data[k])
    return res
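# Hypothetical query, assuming db_api.SelectionCriteria carries a field name,
# an operator, and a value (as its use in record.check suggests): select all
# records whose "grade" field equals "A".
matches = students.query_table([db_api.SelectionCriteria("grade", "=", "A")])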
def insert_record(self, values: Dict[str, Any]) -> None:
    path = f"{DB_ROOT}/{self.name}"
    data = files.load(f"{path}/{self.meta_data['files_num']}.json")
    index = files.load(f"{path}/{self.meta_data['key']}_index.json")
    if index.get(str(values[self.meta_data["key"]])) is not None:
        raise ValueError(f"duplicate key: {values[self.meta_data['key']]}")
    if len(data) >= 1000:
        # The current data file is full: start a new numbered file and record
        # it in the meta data so the index entry points at the right file.
        self.meta_data["files_num"] += 1
        files.dump({values[self.meta_data["key"]]: values},
                   f"{path}/{self.meta_data['files_num']}.json")
    else:
        data[values[self.meta_data["key"]]] = values
        files.dump(data, f"{path}/{self.meta_data['files_num']}.json")
    record.add(index, values[self.meta_data["key"]],
               self.meta_data["files_num"])
    files.dump(index, f"{path}/{self.meta_data['key']}_index.json")
    self.meta_data["count"] += 1
    files.dump(self.meta_data, f"{path}/meta_data.json")
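# Usage sketch for insert_record (table and values are illustrative): records
# accumulate in the current data file until it holds 1000 entries, then a new
# numbered file is started; re-inserting an existing key raises ValueError.
students.insert_record({"id": 7, "name": "Dana"})
try:
    students.insert_record({"id": 7, "name": "Dana"})
except ValueError:
    print("duplicate key rejected")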
def __init__(self, name: str, fields: List[db_api.DBField],
             key_field_name: str, meta_data=None):
    self.name = name
    self.fields = fields
    self.key_field_name = key_field_name
    if meta_data is None:
        meta_data = files.load(f"{DB_ROOT}/{self.name}/meta_data.json")
    self.meta_data = meta_data
def __init__(self, network=None, link=False):
    import files
    import links
    files.init()
    links.init()
    if network:
        print("Creating network ...")
        self.network = self.createReseau(network)
        print("Creating links ...")
        self.links = self.createLiens()
    else:
        print("Loading network ...")
        self.network, self.links = files.load()
def saveEvent(type, seconds=0):
    print("HISTORY - SAVE EVENT")
    data = FILES.load(FILENAME)
    record = type + "|" + TIMES.now() + "|" + str(seconds) + "\n"
    data.append(record)
    FILES.save(FILENAME, data)
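# Example call (hypothetical event type and duration): appends one
# pipe-delimited line such as "POMODORO|<timestamp>|1500" to the history file.
saveEvent("POMODORO", seconds=1500)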
def loadAllEvents():
    print("HISTORY - LOAD ALL EVENTS")
    return FILES.load(FILENAME)
def get_table(self, table_name: str) -> DBTable:
    meta_data = files.load(f"{DB_ROOT}/{table_name}/meta_data.json")
    fields = [db_api.DBField(name, str) for name in meta_data["must"]]
    return DBTable(table_name, fields, meta_data["key"], meta_data)
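# Illustrative call, assuming a database object `db` exposing get_table and an
# existing "students" table under DB_ROOT: rebuilds a DBTable from its
# persisted meta_data.json.
table = db.get_table("students")
print(table.key_field_name)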