def on_modified(self, event: Any):
    """Watchdog callback: refresh the cached digest of a tracked path.

    Untracked paths are ignored. A tracked directory has its cache entry
    cleared to ``None``; a tracked file is re-read and re-digested. Any
    error is logged and swallowed — observer callbacks must never raise.
    """
    try:
        path = event.src_path
        if path not in self._watches:
            return
        if event.is_directory:
            # Directories carry no content digest; mark as unknown.
            self._watches[path] = None
            return
        with open(path, 'rt') as handle:
            text = handle.read()
        self._watches[path] = create_string_digest(text)
    except Exception:
        # Boundary handler: record the failure, keep the observer alive.
        logger.exception('file observer error')
def __create_or_bind(self, *, db_properties: List[LMDBProperties],
                     versioned: bool, metadata: Dict[str, Any],
                     on_init: Optional[Callable[[bool], None]],
                     bind: bool = True):
    """Create this named object in the LMDB environment, or bind to it.

    Inside a single write transaction: if the name already exists, either
    delegate to binding (``bind=True``) or raise ``ObjectExistsError``.
    Otherwise register a fresh UUID, an initial version counter, and a
    JSON descriptor, then open the object's user databases.

    Args:
        db_properties: Per-database LMDB settings for the object's databases.
        versioned: Whether the object records a version counter.
        metadata: Arbitrary caller metadata stored in the descriptor.
        on_init: Optional callback invoked with ``True`` after creation.
        bind: When the name already exists, bind instead of raising.
    """
    with transaction_context(self._env, write=True) as (txn, _, _):
        # Name -> UUID lookup; presence means the object already exists.
        obj_uuid = txn.get(key=self._encname, db=self._namedb)
        if obj_uuid:
            if bind:
                # NOTE(review): calls __bind_or_create while this method is
                # named __create_or_bind — presumably a sibling method; verify
                # the name is correct and not a typo'd recursion.
                return self.__bind_or_create(db_properties=db_properties,
                                             versioned=versioned,
                                             metadata=metadata,
                                             on_init=on_init,
                                             create=False)
            raise ObjectExistsError()
        # New object: mint a UUID and register name -> uuid mapping.
        obj_uuid = uuid.uuid4().bytes
        # NOTE(review): asserts are stripped under `python -O`; these puts
        # would then run unchecked — consider explicit error handling.
        assert txn.put(key=self._encname, value=obj_uuid, db=self._namedb)
        # Version counter starts at 0, packed as a native size_t.
        assert txn.put(key=obj_uuid, value=struct.pack('@N', 0),
                       db=self._versdb)
        basename = str(uuid.uuid4())
        # Descriptor pairs each user database's digest-derived id with its
        # properties, plus identity/bookkeeping fields.
        descriptor: Descriptor = dict(
            databases=list(
                zip([
                    create_string_digest(''.join([basename, str(i)]))
                    for i in range(len(db_properties))
                ], db_properties)),
            uuid=str(uuid.UUID(bytes=obj_uuid)),
            versioned=versioned,
            created=str(datetime.datetime.now()),
            type=get_qualified_class_name(self),
            metadata=metadata)
        assert txn.put(key=obj_uuid, value=orjson.dumps(descriptor),
                       db=self._descdb)
        # Open every user database declared in the descriptor (created now).
        for dbuid, props in descriptor['databases']:
            self._userdb.append(
                open_database_threadsafe(txn, self._env, dbuid, props,
                                         create=True))
        self._uuid_bytes = obj_uuid
        self._versioned = descriptor['versioned']
        if on_init:
            # _create flags to on_init (and anything it calls) that we are
            # in the creation path; cleared immediately after.
            self._create = True
            on_init(True)
            self._create = False
        return None
def get_content_digest(self, path: str) -> Optional[str]:
    """Return the content digest for *path*, computing and caching it on first use.

    Results are memoized in ``self._watches`` so repeated lookups do not
    re-read the file.

    Args:
        path: Filesystem path of a text file to digest.

    Returns:
        The cached digest string for *path* (may be ``None`` if another
        code path stored ``None`` for it, e.g. for directories).
    """
    # Idiom fix: `path not in` instead of `not path in`.
    if path not in self._watches:
        with open(path, 'rt') as file:
            contents = file.read()
        self._watches[path] = create_string_digest(contents)
    return self._watches[path]
async_limit: Optional[int] = None, metadata: Optional[Dict[str, Any]] = None, disable_sync: Optional[bool] = None, site_uuid: Optional[str] = None, create: bool = True, bind: bool = True): self.__latest: Optional[Tuple[str, Union[bytes, Tuple[str, str]]]] self.__default_sync: bool self.__async_limit: Optional[int] self.__disable_sync: bool if target: module = inspect.getmodule(target) if not hasattr(module, '__file__'): bytecode = cloudpickle.dumps(target) digest = '-'.join(['bytecode', create_string_digest(bytecode)]) else: assert isinstance(module, types.ModuleType) digest = file_observer.get_digest(module.__name__, target.__name__) def load_target(): if target: self._target_function = target if not hasattr(module, '__file__'): if self.__latest is None or self.__latest[0] != digest: self.__latest = (digest, bytecode) else: if self.__latest is None or self.__latest[0] != digest: self.__latest = (digest, (module.__name__,