class Notify:
    """Support for auto-reloading changed files via Linux inotify."""

    def notify(self, run=True):
        """Start (run=True) or stop (run=False) watching the last-read file.

        Uses the platform inotify API when available; on platforms without
        inotify support this is a no-op.
        """
        file = self.readed_file
        if not file:
            # Nothing has been read yet, so there is nothing to watch.
            return
        if runtime.platform.supportsINotify():
            # Use linux inotify API
            if run:
                def _notify(ignored, filepath, mask, file=file):
                    # Only react to events for the watched file itself;
                    # the watch is on the parent directory.
                    if filepath.isfile() and file.endswith(filepath.basename()):
                        log.msg(self, 'change', filepath, humanReadableMask(mask))
                        # Reload file
                        self.read(filepath.path)

                self._notifier = INotify()
                self._notifier.startReading()
                # Watch the containing directory: editors commonly replace
                # the file on save, which would invalidate a direct watch.
                self._notifier.watch(FilePath(file).parent(), mask=IN_MODIFY, callbacks=(_notify, ))
            else:
                # BUGFIX: this branch previously said "Stop watcher" but was
                # a bare `pass`, so the notifier kept running forever.
                notifier = getattr(self, '_notifier', None)
                if notifier is not None:
                    notifier.stopReading()
                    self._notifier = None
class DarkNotify(object):
    """Thin wrapper around Twisted's INotify for watching folders."""

    def __init__(self):
        # Create the inotify handle and start dispatching its events.
        watcher = INotify()
        watcher.startReading()
        self.notifier = watcher

    def add(self, folders):
        """Watch ``folders`` and route change events to ``notified``."""
        self.notifier.watch(FilePath(folders), callbacks=[notified])
def watch(self):
    """Begin monitoring the WSGI root and schedule rescans on changes."""
    self._scheduler = SoftHardScheduler(
        reactor, self.rescan_soft_delay, self.rescan_hard_delay, self.scan)
    # Every event that can add, remove or invalidate entries under the root.
    event_mask = (IN_CREATE | IN_MOVED_TO | IN_DELETE | IN_MOVED_FROM
                  | IN_DELETE_SELF | IN_MOVE_SELF | IN_UNMOUNT)
    fs_watcher = INotify()
    fs_watcher.startReading()
    fs_watcher.watch(self.wsgi_root, event_mask,
                     callbacks=[self.on_fs_change],
                     autoAdd=True, recursive=True)
def __init__(self, dirUriMap: DirUriMap, patch: PatchCb, getSubgraph: GetSubgraph, addlPrefixes: Dict[str, URIRef]):
    """Store the callbacks, start inotify, and load every file found."""
    # Mapping of absolute directory path -> URI prefix.
    self.dirUriMap = dirUriMap
    self.patch = patch
    self.getSubgraph = getSubgraph
    self.addlPrefixes = addlPrefixes
    # One GraphFile per context URI.
    self.graphFiles: Dict[URIRef, GraphFile] = {}
    self.notifier = INotify()
    self.notifier.startReading()
    self.findAndLoadFiles()
class FilesystemNotifier(base.NotifierBase):
    """Notifier backed by inotify.

    Fires onFileChanged() when the configured file is modified on disk.
    """

    def __init__(self):
        super(FilesystemNotifier, self).__init__()
        self.notifier = INotify()
        self.notifier.startReading()

    def setFilename(self, filename):
        """Move the inotify watch from the previous path to ``filename``."""
        if self._path is not None:
            # Drop the watch on the previously configured path first.
            self.notifier.ignore(FilePath(self._path))
        super(FilesystemNotifier, self).setFilename(filename)
        if self._path is not None:
            self.notifier.watch(FilePath(self._path),
                                callbacks=[self.__notify])

    def stop(self):
        """Shut the inotify reader down; further changes are ignored."""
        if self.notifier is not None:
            self.notifier.stopReading()
            self.notifier = None

    def __notify(self, handle, filepath, mask):
        # Only react to events concerning our file, and only when the
        # base-class check says the content really changed.
        watched = self._filename
        if watched is None:
            return
        if filepath.basename() != os.path.split(watched)[-1]:
            return
        if self._check(watched) and watched:
            self.stamp = os.stat(watched).st_mtime
            self.onFileChanged()

    def onFileChanged(self):
        """Subclass hook invoked after the watched file changed."""
        raise NotImplementedError
class CodeWatcher(object):
    """Reload the effecteval module (and notify a caller) whenever its
    source file changes on disk."""

    def __init__(self, onChange):
        self.onChange = onChange
        watcher = INotify()
        watcher.startReading()
        # Watch the .py source, not the stale .pyc the module may have
        # been imported from.
        source = FilePath(effecteval.__file__.replace('.pyc', '.py'))
        watcher.watch(source, callbacks=[self.codeChange])
        self.notifier = watcher

    def codeChange(self, watch, path, mask):
        def reload_and_notify():
            log.info("reload effecteval")
            reload(effecteval)
            self.onChange()

        # Delay a little in case we got an event at the start of the write.
        reactor.callLater(.1, reload_and_notify)
def __init__(self, onChange):
    """Watch effecteval's source file and report edits via codeChange."""
    self.onChange = onChange
    self.notifier = INotify()
    self.notifier.startReading()
    # Map a possibly-cached .pyc path back to the real .py source.
    watched_source = FilePath(effecteval.__file__.replace('.pyc', '.py'))
    self.notifier.watch(watched_source, callbacks=[self.codeChange])
def setup_inotify(self):
    """Create the INotify watcher for self.fp and arm setup_done."""
    watcher = INotify()
    watcher.startReading()
    watcher.watch(FilePath(self.fp.name), callbacks=[self.inotify])
    self.notifier = watcher
    # setup_done is used mainly in testing: fire the current deferred,
    # then replace it with a fresh one for the next setup cycle.
    self.setup_done.callback(True)
    self.setup_done = Deferred()
def __init__(self, reactor, config):
    """Wire up the camera controller: an HTTP agent for snapshot requests
    to the motion daemon, plus an inotify watch on the capture directory."""
    LOGGER.info("Initializing TreatCam")
    self.config = config
    self.reactor = reactor
    self.agent = Agent(reactor)
    # Deferreds awaiting the next completed capture.
    self.defers = []
    self.snapshotActionUrl = "http://localhost:%d/0/action/snapshot" % self.config.motionControlPort
    self.capturePath = FilePath(config.captureDir)
    self.lastCaptureLink = self.capturePath.child(TreatCam.LAST_CAPTURE_LINK_NAME)
    self.lastCaptureTime = None
    self.lastCaptureName = None
    # Recover state from any capture files already on disk.
    self.findPreExistingLastCapture()
    # Watch for new files appearing in the capture directory.
    self.notifier = INotify()
    self.notifier.startReading()
    self.notifier.watch(self.capturePath, mask=IN_CREATE, callbacks=[self.notifyCallback])
def watch(self):
    """Start the rescan scheduler and recursively watch the WSGI root."""
    self._scheduler = SoftHardScheduler(reactor,
                                        self.rescan_soft_delay,
                                        self.rescan_hard_delay,
                                        self.scan)
    watcher = INotify()
    watcher.startReading()
    # Subscribe to any event that could change which files exist.
    watcher.watch(
        self.wsgi_root,
        IN_CREATE | IN_MOVED_TO | IN_DELETE | IN_MOVED_FROM
        | IN_DELETE_SELF | IN_MOVE_SELF | IN_UNMOUNT,
        callbacks=[self.on_fs_change],
        autoAdd=True,
        recursive=True,
    )
class WatchedFiles(object):
    """
    find files, notice new files.

    This object watches directories. Each GraphFile watches its own file.
    """

    def __init__(self, dirUriMap: DirUriMap, patch: PatchCb, getSubgraph: GetSubgraph, addlPrefixes: Dict[str, URIRef]):
        self.dirUriMap = dirUriMap  # {abspath : uri prefix}
        self.patch, self.getSubgraph = patch, getSubgraph
        self.addlPrefixes = addlPrefixes
        self.graphFiles: Dict[URIRef, GraphFile] = {}  # context uri : GraphFile
        self.notifier = INotify()
        self.notifier.startReading()
        self.findAndLoadFiles()

    def findAndLoadFiles(self) -> None:
        """Walk every topdir, watch each file found, and watch each
        directory for later additions. initialLoad is True for the
        duration of the walk."""
        self.initialLoad = True
        try:
            for topdir in self.dirUriMap:
                for dirpath, dirnames, filenames in os.walk(topdir):
                    for base in filenames:
                        p = os.path.join(dirpath, base)
                        # why wasn't mypy catching this?
                        assert isinstance(p, bytes)
                        self.watchFile(p)
                    # Watch the directory itself so new files trigger dirChange.
                    self.notifier.watch(FilePath(dirpath), autoAdd=True, callbacks=[self.dirChange])
        finally:
            self.initialLoad = False

    def dirChange(self, watch, path: FilePath, mask):
        """inotify callback for the directory watches: consider newly
        created paths, skipping editor temp/swap files."""
        if mask & IN_CREATE:
            if path.path.endswith((b'~', b'.swp', b'swx', b'.rdfdb-temp')):
                return
            log.debug("%s created; consider adding a watch", path)
            self.watchFile(path.path)

    def watchFile(self, inFile: bytes):
        """
        consider adding a GraphFile to self.graphFiles

        inFile needs to be a relative path, not an absolute
        (e.g. in a FilePath) because we use its exact relative
        form in the context URI
        """
        if not os.path.isfile(inFile):
            return

        inFile = correctToTopdirPrefix(self.dirUriMap, inFile)
        if os.path.splitext(inFile)[1] not in [b'.n3']:
            return

        if b'/capture/' in inFile:
            # smaller graph for now
            return

        # an n3 file with rules makes it all the way past this reading
        # and the serialization. Then, on the receiving side, a
        # SyncedGraph calls graphFromNQuad on the incoming data and
        # has a parse error. I'm not sure where this should be fixed
        # yet.
        if b'-rules' in inFile:
            return

        # for legacy versions, compile all the config stuff you want
        # read into one file called config.n3. New versions won't read
        # it.
        if inFile.endswith(b"config.n3"):
            return

        ctx = uriFromFile(self.dirUriMap, inFile)
        gf = self._addGraphFile(ctx, inFile)
        log.info("%s do initial read", inFile)
        gf.reread()

    def aboutToPatch(self, ctx: URIRef):
        """
        warn us that a patch is about to come to this context. it's more
        straightforward to create the new file now

        this is meant to make the file before we add triples, so we
        wouldn't see the blank file and lose those triples. But it
        didn't work, so there are other measures that make us not lose
        the triples from a new file. Calling this before patching the
        graph is still a reasonable thing to do, though.
        """
        if ctx not in self.graphFiles:
            outFile = fileForUri(self.dirUriMap, ctx)
            # mypy missed the next line because of
            # https://github.com/python/typeshed/issues/2937 ('str in
            # bytes' isn't an error)
            assert b'//' not in outFile, (outFile, self.dirUriMap, ctx)
            log.info("starting new file %r", outFile)
            self._addGraphFile(ctx, outFile)

    def _addGraphFile(self, ctx, path):
        """Create and register a GraphFile for (ctx, path); returns it."""
        self.addlPrefixes.setdefault(ctx, {})
        self.addlPrefixes.setdefault(None, {})
        gf = GraphFile(self.notifier, path, ctx, self.patch, self.getSubgraph, globalPrefixes=self.addlPrefixes[None], ctxPrefixes=self.addlPrefixes[ctx])
        self.graphFiles[ctx] = gf
        fileStats.mappedGraphFiles = len(self.graphFiles)
        return gf

    def dirtyFiles(self, ctxs):
        """mark dirty the files that we watch in these contexts.

        the ctx might not be a file that we already read; it might be
        for a new file we have to create, or it might be for a
        transient context that we're not going to save

        if it's a ctx with no file, error
        """
        for ctx in ctxs:
            g = self.getSubgraph(ctx)
            self.graphFiles[ctx].dirty(g)
def __init__(self, server, **kwargs):
    """Build the file-system media store.

    kwargs of interest: name, content (comma-separated paths),
    ignore_patterns, enable_inotify, enable_destroy, import_folder,
    create_root.
    """
    BackendStore.__init__(self, server, **kwargs)
    self.next_id = 1000
    self.name = kwargs.get('name', 'my media')
    self.content = kwargs.get('content', None)
    # Normalize `content` into a set of absolute paths.
    if self.content is not None:
        if isinstance(self.content, str):
            self.content = [self.content]
        cl = []
        for a in self.content:
            cl += a.split(',')
        self.content = cl
    else:
        # Fall back to the XDG media folders.
        self.content = xdg_content()
        self.content = [x[0] for x in self.content]
    if self.content is None:
        self.content = 'tests/content'
    if not isinstance(self.content, list):
        self.content = [self.content]
    self.content = set([os.path.abspath(x) for x in self.content])
    ignore_patterns = kwargs.get('ignore_patterns', [])
    self.store = {}
    # inotify stays None when monitoring is disabled or unavailable.
    self.inotify = None
    if kwargs.get('enable_inotify', 'yes') == 'yes':
        if INotify:
            try:
                self.inotify = INotify()
                self.inotify.startReading()
            except Exception as msg:
                self.error(f'inotify disabled: {msg}')
                self.inotify = None
        else:
            self.info(f'{no_inotify_reason}')
    else:
        self.info('FSStore content auto-update disabled upon user request')
    if kwargs.get('enable_destroy', 'no') == 'yes':
        # Expose the hidden destroy method as the UPnP action.
        self.upnp_DestroyObject = self.hidden_upnp_DestroyObject
    self.import_folder = kwargs.get('import_folder', None)
    if self.import_folder is not None:
        self.import_folder = os.path.abspath(self.import_folder)
        if not os.path.isdir(self.import_folder):
            self.import_folder = None
    # Hidden files are always ignored, plus any user patterns.
    self.ignore_file_pattern = re.compile(r'|'.join([r'^\..*'] + list(ignore_patterns)))
    parent = None
    self.update_id = 0
    # A synthetic root container is needed when exporting several trees,
    # when explicitly requested, or when an import folder exists.
    if (len(self.content) > 1 or utils.means_true(kwargs.get('create_root', False)) or self.import_folder is not None):
        UPnPClass = classChooser('root')
        id = str(self.getnextID())
        try:
            parent = self.store[id] = FSItem(
                id,
                parent,
                'media',
                'root',
                self.urlbase,
                UPnPClass,
                update=True,
                store=self,
            )
        except Exception as e:
            self.error(
                f'Error on setting self.store[id], Error on FSItem: {e}')
            exit(1)
    if self.import_folder is not None:
        # NOTE(review): UPnPClass is only bound in the branch above; it is
        # reached here because import_folder also triggers that branch.
        id = str(self.getnextID())
        self.store[id] = FSItem(
            id,
            parent,
            self.import_folder,
            'directory',
            self.urlbase,
            UPnPClass,
            update=True,
            store=self,
        )
        self.import_folder_id = id
    for bytesPath in self.content:
        if isinstance(bytesPath, (list, tuple)):
            path = str(bytesPath[0])
        else:
            path = str(bytesPath)
        if self.ignore_file_pattern.match(path):
            continue
        try:
            self.walk(path, parent, self.ignore_file_pattern)
        except Exception as msg:
            self.warning(f'on walk of {path!r}: {msg!r}')
            import traceback
            self.debug(traceback.format_exc())
    # WMC referenced-id mapping: music/video/pictures/playlists -> root.
    self.wmc_mapping.update({'14': '0', '15': '0', '16': '0', '17': '0'})
    self.init_completed = True
class FSStore(BackendStore):
    '''
    .. versionchanged:: 0.9.0
        Migrated from louie/dispatcher to EventDispatcher
    '''

    logCategory = 'fs_store'
    implements = ['MediaServer']
    description = '''MediaServer exporting files from the file-system'''

    # User-visible configuration schema for this backend.
    options = [
        {
            'option': 'name',
            'type': 'string',
            'default': 'my media',
            'help': 'the name under this MediaServer '
                    'shall show up with on other UPnP clients',
        },
        {
            'option': 'version',
            'type': 'int',
            'default': 2,
            'enum': (2, 1),
            'help': 'the highest UPnP version this MediaServer shall support',
            'level': 'advance',
        },
        {
            'option': 'uuid',
            'type': 'string',
            'help': 'the unique (UPnP) identifier for this MediaServer,'
                    ' usually automatically set',
            'level': 'advance',
        },
        {
            'option': 'content',
            'type': 'string',
            'default': xdg_content(),
            'help': 'the path(s) this MediaServer shall export',
        },
        {
            'option': 'ignore_patterns',
            'type': 'string',
            'help': 'list of regex patterns, matching filenames will be ignored',  # noqa: E501
        },
        {
            'option': 'enable_inotify',
            'type': 'string',
            'default': 'yes',
            'help': 'enable real-time monitoring of the content folders',
        },
        {
            'option': 'enable_destroy',
            'type': 'string',
            'default': 'no',
            'help': 'enable deleting a file via an UPnP method',
        },
        {
            'option': 'import_folder',
            'type': 'string',
            'help': 'The path to store files imported via an UPnP method, '
                    'if empty the Import method is disabled',
        },
    ]

    def __init__(self, server, **kwargs):
        """Build the store: normalize content paths, set up inotify,
        create the root/import containers and walk all content trees."""
        BackendStore.__init__(self, server, **kwargs)
        self.next_id = 1000
        self.name = kwargs.get('name', 'my media')
        self.content = kwargs.get('content', None)
        # Normalize `content` into a set of absolute paths.
        if self.content is not None:
            if isinstance(self.content, str):
                self.content = [self.content]
            cl = []
            for a in self.content:
                cl += a.split(',')
            self.content = cl
        else:
            # Fall back to the XDG media folders.
            self.content = xdg_content()
            self.content = [x[0] for x in self.content]
        if self.content is None:
            self.content = 'tests/content'
        if not isinstance(self.content, list):
            self.content = [self.content]
        self.content = set([os.path.abspath(x) for x in self.content])
        ignore_patterns = kwargs.get('ignore_patterns', [])
        self.store = {}
        # inotify stays None when monitoring is disabled or unavailable.
        self.inotify = None
        if kwargs.get('enable_inotify', 'yes') == 'yes':
            if INotify:
                try:
                    self.inotify = INotify()
                    self.inotify.startReading()
                except Exception as msg:
                    self.error(f'inotify disabled: {msg}')
                    self.inotify = None
            else:
                self.info(f'{no_inotify_reason}')
        else:
            self.info('FSStore content auto-update disabled upon user request')
        if kwargs.get('enable_destroy', 'no') == 'yes':
            # Expose the hidden destroy method as the UPnP action.
            self.upnp_DestroyObject = self.hidden_upnp_DestroyObject
        self.import_folder = kwargs.get('import_folder', None)
        if self.import_folder is not None:
            self.import_folder = os.path.abspath(self.import_folder)
            if not os.path.isdir(self.import_folder):
                self.import_folder = None
        # Hidden files are always ignored, plus any user patterns.
        self.ignore_file_pattern = re.compile(r'|'.join([r'^\..*'] + list(ignore_patterns)))
        parent = None
        self.update_id = 0
        # A synthetic root container is needed when exporting several
        # trees, when explicitly requested, or with an import folder.
        if (len(self.content) > 1 or utils.means_true(kwargs.get('create_root', False)) or self.import_folder is not None):
            UPnPClass = classChooser('root')
            id = str(self.getnextID())
            try:
                parent = self.store[id] = FSItem(
                    id,
                    parent,
                    'media',
                    'root',
                    self.urlbase,
                    UPnPClass,
                    update=True,
                    store=self,
                )
            except Exception as e:
                self.error(
                    f'Error on setting self.store[id], Error on FSItem: {e}')
                exit(1)
        if self.import_folder is not None:
            # NOTE(review): UPnPClass is bound above; this branch is only
            # reachable when the root branch also ran.
            id = str(self.getnextID())
            self.store[id] = FSItem(
                id,
                parent,
                self.import_folder,
                'directory',
                self.urlbase,
                UPnPClass,
                update=True,
                store=self,
            )
            self.import_folder_id = id
        for bytesPath in self.content:
            if isinstance(bytesPath, (list, tuple)):
                path = str(bytesPath[0])
            else:
                path = str(bytesPath)
            if self.ignore_file_pattern.match(path):
                continue
            try:
                self.walk(path, parent, self.ignore_file_pattern)
            except Exception as msg:
                self.warning(f'on walk of {path!r}: {msg!r}')
                import traceback
                self.debug(traceback.format_exc())
        # WMC referenced-id mapping: music/video/pictures/playlists -> root.
        self.wmc_mapping.update({'14': '0', '15': '0', '16': '0', '17': '0'})
        self.init_completed = True

    def __repr__(self):
        return self.__class__.__name__

    def release(self):
        """Stop the inotify reader, if one was started."""
        if self.inotify is not None:
            self.inotify.stopReading()

    def len(self):
        # Number of items currently in the store.
        return len(self.store)

    def get_by_id(self, id):
        """Look an item up by id; accepts str/bytes/int and strips the
        WMC '@' suffix. Returns None when not found."""
        # print('get_by_id', id, type(id))
        # we have referenced ids here when we are in WMC mapping mode
        if isinstance(id, str):
            id = id.split('@', 1)[0]
        elif isinstance(id, bytes):
            id = id.decode('utf-8').split('@', 1)[0]
        elif isinstance(id, int):
            id = str(id)
        # try:
        #     id = int(id)
        # except ValueError:
        #     id = 1000
        # '0' is the UPnP root; map it to our internal root id.
        if id == '0':
            id = '1000'
        # print('get_by_id 2', id)
        try:
            r = self.store[id]
        except KeyError:
            r = None
        # print('get_by_id 3', r)
        return r

    def get_id_by_name(self, parent='0', name=''):
        """Find a child of `parent` whose real path equals `name`;
        returns the child id or None."""
        self.info(f'get_id_by_name {parent} ({type(parent)}) {name}')
        try:
            parent = self.store[parent]
            self.debug(f'{parent} {len(parent.children):d}')
            for child in parent.children:
                # if not isinstance(name, unicode):
                #     name = name.decode('utf8')
                self.debug(f'{child.get_name()} {child.get_realpath()} ' +
                           f'{name == child.get_realpath()}')
                if name == child.get_realpath():
                    return child.id
        except Exception as e:
            self.error(f'get_id_by_name: {e!r}')
            import traceback
            self.info(traceback.format_exc())
        self.debug('get_id_by_name not found')
        return None

    def get_url_by_name(self, parent='0', name=''):
        """Resolve a (parent, real path) pair to the item's URL, or ''."""
        self.info(f'get_url_by_name {parent!r} {name!r}')
        id = self.get_id_by_name(parent, name)
        # print 'get_url_by_name', id
        if id is None:
            return ''
        return self.store[id].url

    def update_config(self, **kwargs):
        """Apply a new 'content' setting, adding/removing folders as needed."""
        self.info(f'update_config: {kwargs}')
        if 'content' in kwargs:
            new_content = kwargs['content']
            new_content = set(
                [os.path.abspath(x) for x in new_content.split(',')])
            new_folders = new_content.difference(self.content)
            obsolete_folders = self.content.difference(new_content)
            self.debug(f'new folders: {new_folders}\n'
                       f'obsolete folders: {obsolete_folders}')
            for folder in obsolete_folders:
                self.remove_content_folder(folder)
            for folder in new_folders:
                self.add_content_folder(folder)
            self.content = new_content

    def add_content_folder(self, path):
        """Add `path` (made absolute) to the exported content and walk it."""
        path = os.path.abspath(path)
        if path not in self.content:
            self.content.add(path)
            self.walk(path, self.store['1000'], self.ignore_file_pattern)

    def remove_content_folder(self, path):
        """Remove `path` from the exported content and drop its items."""
        path = os.path.abspath(path)
        if path in self.content:
            id = self.get_id_by_name('1000', path)
            self.remove(id)
            self.content.remove(path)

    def walk(self, path, parent=None, ignore_file_pattern=''):
        """Iteratively descend `path`, appending every non-ignored entry."""
        self.debug(f'walk {path}')
        containers = []
        parent = self.append(path, parent)
        if parent is not None:
            containers.append(parent)
        # Breadth-ish traversal using an explicit stack of containers.
        while len(containers) > 0:
            container = containers.pop()
            try:
                self.debug(f'adding {container.location!r}')
                self.info(f'walk.adding: {container.location}')
                for child in container.location.children():
                    if ignore_file_pattern.match(child.basename()) is not None:
                        continue
                    new_container = self.append(child.path, container)
                    if new_container is not None:
                        containers.append(new_container)
            except UnicodeDecodeError:
                self.warning(
                    f'UnicodeDecodeError - there is something wrong with a ' +
                    f'file located in {container.get_path()!r}')

    def create(self, mimetype, path, parent):
        """Create an FSItem for `path` with a fresh id and register it;
        bumps SystemUpdateID/ContainerUpdateIDs. Returns the new id."""
        self.debug(f'create {mimetype} {path} {type(path)} {parent}')
        UPnPClass = classChooser(mimetype)
        if UPnPClass is None:
            return None
        id = self.getnextID()
        # Containers get a bare numeric id; files carry their extension.
        if mimetype in ('root', 'directory'):
            id = str(id)
        else:
            _, ext = os.path.splitext(path)
            id = str(id) + ext.lower()
        update = False
        if hasattr(self, 'update_id'):
            update = True
        self.store[id] = FSItem(
            id,
            parent,
            path,
            mimetype,
            self.urlbase,
            UPnPClass,
            update=True,
            store=self,
        )
        if hasattr(self, 'update_id'):
            self.update_id += 1
            # print(self.update_id)
            if self.server:
                if hasattr(self.server, 'content_directory_server'):
                    self.server.content_directory_server.set_variable(
                        0, 'SystemUpdateID', self.update_id)
            if parent is not None:
                value = (parent.get_id(), parent.get_update_id())
                if self.server:
                    if hasattr(self.server, 'content_directory_server'):
                        self.server.content_directory_server.set_variable(
                            0, 'ContainerUpdateIDs', value)
        return id

    def append(self, bytes_path, parent):
        """Add one filesystem entry under `parent`; directories also get
        an inotify watch. Returns the stored item or None."""
        path = str(bytes_path)
        self.debug(f'append {path} {type(path)} {parent}')
        if not os.path.exists(path):
            self.warning(f'path {path!r} not available - ignored')
            return None
        if stat.S_ISFIFO(os.stat(path).st_mode):
            self.warning(f'path {path!r} is a FIFO - ignored')
            return None
        try:
            mimetype, _ = mimetypes.guess_type(path, strict=False)
            if mimetype is None:
                if os.path.isdir(path):
                    mimetype = 'directory'
            if mimetype is None:
                return None
            id = self.create(mimetype, path, parent)
            if mimetype == 'directory':
                if self.inotify is not None:
                    mask = (IN_CREATE | IN_DELETE | IN_MOVED_FROM
                            | IN_MOVED_TO | IN_CHANGED)
                    self.inotify.watch(
                        FilePath(os.path.abspath(path)),
                        mask=mask,
                        autoAdd=False,
                        callbacks=[partial(self.notify, parameter=id)],
                    )
            return self.store[id]
        except OSError as os_msg:
            # seems we have some permissions issues along the content path
            self.warning(f'path {path} isn\'t accessible, error {os_msg}')
        return None

    def remove(self, id):
        """Remove an item (and its on-disk representation) by id; missing
        ids are ignored."""
        self.debug(f'FSSTore remove id: {id}')
        try:
            item = self.store[id]
            parent = item.get_parent()
            item.remove()
            del self.store[id]
            if hasattr(self, 'update_id'):
                self.update_id += 1
                if self.server:
                    self.server.content_directory_server.set_variable(
                        0, 'SystemUpdateID', self.update_id)
                # value = f'{parent.get_id():d},{parent_get_update_id():d}'
                value = (parent.get_id(), parent.get_update_id())
                if self.server:
                    self.server.content_directory_server.set_variable(
                        0, 'ContainerUpdateIDs', value)
        except KeyError:
            pass

    def notify(self, ignore, path, mask, parameter=None):
        """inotify callback for watched directories: mirror creations,
        deletions and moves into the store. `parameter` is the id of the
        watched (parent) container."""
        self.info(
            'Event %s on %s - parameter %r',
            ', '.join([fl for fl in _FLAG_TO_HUMAN if fl[0] == mask][0]),
            path.path, parameter,
        )
        if mask & IN_CHANGED:
            # FIXME react maybe on access right changes, loss of read rights?
            # print(f'{path} was changed, parent {parameter:d} ({iwp.path})')
            pass
        if mask & IN_DELETE or mask & IN_MOVED_FROM:
            self.info(f'{path.path} was deleted, '
                      f'parent {parameter!r} ({path.parent.path})')
            id = self.get_id_by_name(parameter, path.path)
            if id is not None:
                self.remove(id)
        if mask & IN_CREATE or mask & IN_MOVED_TO:
            if mask & IN_ISDIR:
                self.info(f'directory {path.path} was created, '
                          f'parent {parameter!r} ({path.parent.path})')
            else:
                self.info(f'file {path.path} was created, '
                          f'parent {parameter!r} ({path.parent.path})')
            if self.get_id_by_name(parameter, path.path) is None:
                if path.isdir():
                    self.walk(
                        path.path,
                        self.get_by_id(parameter),
                        self.ignore_file_pattern,
                    )
                else:
                    # NOTE(review): matching the ignore pattern against
                    # `parameter` (the parent id) and wrapping the parent
                    # item in str() both look suspicious — verify intent.
                    if self.ignore_file_pattern.match(parameter) is None:
                        self.append(str(path.path),
                                    str(self.get_by_id(parameter)))

    def getnextID(self):
        # Monotonically increasing internal id counter.
        ret = self.next_id
        self.next_id += 1
        return ret

    def backend_import(self, item, data):
        """Write `data` (bytes or a file-like) into the item's file.
        Returns an HTTP-style status: 200 on success, 500 on failure."""
        try:
            f = open(item.get_path(), 'w+b')
            if hasattr(data, 'read'):
                data = data.read()
            f.write(data)
            f.close()
            item.rebuild(self.urlbase)
            return 200
        except IOError:
            self.warning(f'import of file {item.get_path()} failed')
        except Exception as msg:
            import traceback
            self.warning(traceback.format_exc())
        return 500

    def upnp_init(self):
        """Announce the supported source protocols and the current
        SystemUpdateID to the UPnP servers."""
        self.current_connection_id = None
        if self.server:
            self.server.connection_manager_server.set_variable(
                0,
                'SourceProtocolInfo',
                [
                    f'internal:{self.server.coherence.hostname}:audio/mpeg:*',
                    'http-get:*:audio/mpeg:*',
                    f'internal:{self.server.coherence.hostname}:video/mp4:*',
                    'http-get:*:video/mp4:*',
                    f'internal:{self.server.coherence.hostname}:application/ogg:*',  # noqa: E501
                    'http-get:*:application/ogg:*',
                    f'internal:{self.server.coherence.hostname}:video/x-msvideo:*',  # noqa: E501
                    'http-get:*:video/x-msvideo:*',
                    f'internal:{self.server.coherence.hostname}:video/mpeg:*',
                    'http-get:*:video/mpeg:*',
                    f'internal:{self.server.coherence.hostname}:video/avi:*',
                    'http-get:*:video/avi:*',
                    f'internal:{self.server.coherence.hostname}:video/divx:*',
                    'http-get:*:video/divx:*',
                    f'internal:{self.server.coherence.hostname}:video/quicktime:*',  # noqa: E501
                    'http-get:*:video/quicktime:*',
                    f'internal:{self.server.coherence.hostname}:image/gif:*',
                    'http-get:*:image/gif:*',
                    f'internal:{self.server.coherence.hostname}:image/jpeg:*',
                    'http-get:*:image/jpeg:*'
                    # 'http-get:*:audio/mpeg:DLNA.ORG_PN=MP3;DLNA.ORG_OP=11;'
                    # 'DLNA.ORG_FLAGS=01700000000000000000000000000000',
                    # 'http-get:*:audio/x-ms-wma:DLNA.ORG_PN=WMABASE;'
                    # 'DLNA.ORG_OP=11;DLNA.ORG_FLAGS'
                    # '=01700000000000000000000000000000',
                    # 'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_TN;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=00f00000000000000000000000000000',
                    # 'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_SM;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=00f00000000000000000000000000000',
                    # 'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_MED;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=00f00000000000000000000000000000',
                    # 'http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_LRG;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=00f00000000000000000000000000000',
                    # 'http-get:*:video/mpeg:DLNA.ORG_PN=MPEG_PS_PAL;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=01700000000000000000000000000000',
                    # 'http-get:*:video/x-ms-wmv:DLNA.ORG_PN=WMVMED_BASE;'
                    # 'DLNA.ORG_OP=01;DLNA.ORG_FLAGS'
                    # '=01700000000000000000000000000000',
                ],
                default=True,
            )
            self.server.content_directory_server.set_variable(
                0, 'SystemUpdateID', self.update_id)
            # self.server.content_directory_server.set_variable(
            #     0, 'SortCapabilities', '*')

    def upnp_ImportResource(self, *args, **kwargs):
        """UPnP ImportResource: download SourceURI into the item addressed
        by DestinationURI (which must end in '?import')."""
        SourceURI = kwargs['SourceURI']
        DestinationURI = kwargs['DestinationURI']
        if DestinationURI.endswith('?import'):
            id = DestinationURI.split('/')[-1]
            id = id[:-7]  # remove the ?import
        else:
            return failure.Failure(errorCode(718))
        item = self.get_by_id(id)
        if item is None:
            return failure.Failure(errorCode(718))

        def gotPage(headers):
            # Download finished: fix the extension from the Content-Type,
            # move the temp file into place and refresh the item.
            # print('gotPage', headers)
            content_type = headers.get('content-type', [])
            if not isinstance(content_type, list):
                content_type = list(content_type)
            if len(content_type) > 0:
                extension = mimetypes.guess_extension(content_type[0],
                                                      strict=False)
                item.set_path(None, extension)
            shutil.move(tmp_path, item.get_path())
            item.rebuild(self.urlbase)
            if hasattr(self, 'update_id'):
                self.update_id += 1
                if self.server:
                    if hasattr(self.server, 'content_directory_server'):
                        self.server.content_directory_server.set_variable(
                            0, 'SystemUpdateID', self.update_id)
                if item.parent is not None:
                    value = (item.parent.get_id(),
                             item.parent.get_update_id())
                    if self.server:
                        if hasattr(self.server, 'content_directory_server'):
                            self.server.content_directory_server.set_variable(
                                0, 'ContainerUpdateIDs', value)

        def gotError(error, url):
            # Download failed: log and discard the temp file.
            self.warning(f'error requesting {url}')
            self.info(error)
            os.unlink(tmp_path)
            return failure.Failure(errorCode(718))

        tmp_fp, tmp_path = tempfile.mkstemp()
        os.close(tmp_fp)
        utils.downloadPage(SourceURI, tmp_path).addCallbacks(
            gotPage, gotError, None, None, [SourceURI], None)
        transfer_id = 0  # FIXME
        return {'TransferID': transfer_id}

    def upnp_CreateObject(self, *args, **kwargs):
        """UPnP CreateObject: create a storage folder or an importable
        item under the given (or the import) container."""
        # print(f'CreateObject {kwargs}')
        if kwargs['ContainerID'] == 'DLNA.ORG_AnyContainer':
            if self.import_folder is not None:
                ContainerID = self.import_folder_id
            else:
                return failure.Failure(errorCode(712))
        else:
            ContainerID = kwargs['ContainerID']
        Elements = kwargs['Elements']
        parent_item = self.get_by_id(ContainerID)
        if parent_item is None:
            return failure.Failure(errorCode(710))
        if parent_item.item.restricted:
            return failure.Failure(errorCode(713))
        if len(Elements) == 0:
            return failure.Failure(errorCode(712))
        elt = DIDLElement.fromString(Elements)
        if elt.numItems() != 1:
            return failure.Failure(errorCode(712))
        item = elt.getItems()[0]
        if item.parentID == 'DLNA.ORG_AnyContainer':
            item.parentID = ContainerID
        if (item.id != '' or item.parentID != ContainerID
                or item.restricted is True or item.title == ''):
            return failure.Failure(errorCode(712))
        # Reject path-traversal attempts in the requested title.
        if '..' in item.title or '~' in item.title or os.sep in item.title:
            return failure.Failure(errorCode(712))
        if item.upnp_class == 'object.container.storageFolder':
            if len(item.res) != 0:
                return failure.Failure(errorCode(712))
            path = os.path.join(parent_item.get_path(), item.title)
            id = self.create('directory', path, parent_item)
            try:
                os.mkdir(path)
            except Exception:
                self.remove(id)
                return failure.Failure(errorCode(712))
            if self.inotify is not None:
                mask = (IN_CREATE | IN_DELETE | IN_MOVED_FROM
                        | IN_MOVED_TO | IN_CHANGED)
                self.inotify.watch(
                    path,
                    mask=mask,
                    autoAdd=False,
                    callbacks=[partial(self.notify, parameter=id)],
                )
            new_item = self.get_by_id(id)
            didl = DIDLElement()
            didl.addItem(new_item.item)
            return {'ObjectID': id, 'Result': didl.toString()}
        if item.upnp_class.startswith('object.item'):
            _, _, content_format, _ = item.res[0].protocolInfo.split(':')
            extension = mimetypes.guess_extension(content_format,
                                                  strict=False)
            path = os.path.join(parent_item.get_realpath(),
                                item.title + extension)
            id = self.create('item', path, parent_item)
            new_item = self.get_by_id(id)
            for res in new_item.item.res:
                res.importUri = new_item.url + '?import'
                res.data = None
            didl = DIDLElement()
            didl.addItem(new_item.item)
            return {'ObjectID': id, 'Result': didl.toString()}
        return failure.Failure(errorCode(712))

    def hidden_upnp_DestroyObject(self, *args, **kwargs):
        """UPnP DestroyObject (only wired up when enable_destroy=yes):
        delete the on-disk file/folder backing the object."""
        ObjectID = kwargs['ObjectID']
        item = self.get_by_id(ObjectID)
        if item is None:
            return failure.Failure(errorCode(701))
        self.info(f'upnp_DestroyObject: {item.location}')
        try:
            item.location.remove()
        except Exception as msg:
            self.error(f'upnp_DestroyObject [{Exception}]: {msg}')
            return failure.Failure(errorCode(715))
        return {}
class INotifyObserver(object):
    """Watch a file with inotify and deliver change payloads to
    registered callbacks."""

    def __init__(self, fp):
        # fp is an already-open file object for the watched file.
        self.fp = fp
        self.setup_done = Deferred()
        if settings["activate_inotify"]:
            self.setup_inotify()
        self.inotify_callbacks = []

    def setup_inotify(self):
        """
        Setup the INotifier watcher.
        """
        self.notifier = INotify()
        self.notifier.startReading()
        filepath = FilePath(self.fp.name)
        self.notifier.watch(filepath, callbacks=[self.inotify])
        # The setup_done is used mainly in testing
        self.setup_done.callback(True)
        self.setup_done = Deferred()

    def register_inotify_callback(self, callback):
        # Callbacks receive the jdata dict filled in by inotify_callback.
        self.inotify_callbacks.append(callback)

    def inotify_callback(self, jdata):
        """
        Must be overwritten by a child.
        """
        raise NotImplementedError

    def inotify(self, ignored, filepath, mask):
        """
        Callback for the INotify. It should call the sse resource with the
        changed layouts in the layout file if there are changes in the
        layout file. It calls the inotify_callback method first which must
        be overwritten by its children.
        """
        hmask = humanReadableMask(mask)
        # Some editors move the file triggering several events in inotify. All
        # of them change some attribute of the file, so if that event happens,
        # see if there are changes and alert the sse resource in that case.
        if 'attrib' in hmask or 'modify' in hmask:
            # Reopen to pick up the replaced file's content from the start.
            self.fp.close()
            self.fp = open(self.fp.name, 'r')
            self.fp.seek(0)
            jdata = {}
            self.inotify_callback(jdata)
            # Count entries across all lists in jdata to detect changes.
            changes = 0
            for _, l in jdata.items():
                changes += len(l)
            if changes > 0:
                for callback in self.inotify_callbacks:
                    callback(jdata)
                log.msg("Change in %s" % self.fp.name)
        # Some editors move the file and inotify lose track of the file, so the
        # notifier must be restarted when some attribute changed is received.
        if 'attrib' in hmask:
            self.notifier.stopReading()
            self.setup_inotify()
def __init__(self):
    """Create the inotify handle and start dispatching its events."""
    super(FilesystemNotifier, self).__init__()
    self.notifier = INotify()
    self.notifier.startReading()
def __init__(self):
    """Create the inotify handle and start dispatching its events."""
    self.notifier = INotify()
    self.notifier.startReading()
# NOTE: Python 2 script (uses `print` statements).

def greed_controller(pathname):
    # Process one root path; logs start/end so overlapping runs are visible.
    print 'WORKING:', pathname
    process_handler(pathname)
    print 'DONE:', pathname


def created(ignored, path, mask):
    # inotify callback: debounce rebuilds per root directory.
    basename = path.basename()
    # Skip hidden files, 't'/'t_'-prefixed names and index.md.
    if re.match(r'^\.|^t$|^t_|^index\.md$', basename):
        return
    pathname = path.path
    pathname = find_root(pathname)
    if pathname not in call_ids or not call_ids[pathname].active():
        # No pending call for this root: schedule one after CALL_DELAY.
        print 'SET:', pathname
        call_ids[pathname] = reactor.callLater(CALL_DELAY, greed_controller, pathname)
    else:
        # A call is already pending: push it back by CALL_DELAY.
        print 'RESET:', pathname
        call_ids[pathname].reset(CALL_DELAY)


# Watch the whole tree recursively for modifications and new files,
# then hand control to the Twisted reactor.
notifier = INotify()
notifier.watch(
    FilePath(WATCH_DIRECTORY),
    mask=IN_MODIFY|IN_CREATE,
    callbacks=[created],
    autoAdd=True,
    recursive=True
)
notifier.startReading()
reactor.run()
class TreatCam:
    """Controller for a motion-daemon camera: triggers snapshots over HTTP
    and detects completed captures via inotify on the capture directory."""

    # Pattern of capture files written by the motion daemon.
    CAPTURE_GLOB = "capture-*.jpg"
    # Timestamp format embedded in capture file names.
    CAPTURE_DATETIME_FORMAT = "%Y%m%d-%H%M%S"
    # Symlink the daemon updates to point at the newest snapshot.
    LAST_CAPTURE_LINK_NAME = "lastsnap.jpg"

    def __init__(self, reactor, config):
        """Set up the HTTP agent, recover prior capture state, and watch
        the capture directory for new files."""
        LOGGER.info("Initializing TreatCam")
        self.config = config
        self.reactor = reactor
        self.agent = Agent(reactor)
        # Deferreds awaiting the next completed capture.
        self.defers = []
        self.snapshotActionUrl = "http://localhost:%d/0/action/snapshot" % self.config.motionControlPort
        self.capturePath = FilePath(config.captureDir)
        self.lastCaptureLink = self.capturePath.child(TreatCam.LAST_CAPTURE_LINK_NAME)
        self.lastCaptureTime = None
        self.lastCaptureName = None
        self.findPreExistingLastCapture()
        self.notifier = INotify()
        self.notifier.startReading()
        self.notifier.watch(self.capturePath, mask=IN_CREATE, callbacks=[self.notifyCallback])

    def __str__(self):
        return "TreatCam"

    def capturePhoto(self):
        """Request a snapshot; returns a Deferred that fires with the new
        capture name (or is cancelled after a 2s timeout)."""
        LOGGER.debug("Received request to capture a photo")
        if not self.defers:
            # Only the first waiter triggers the HTTP request; later
            # callers piggyback on the same pending capture.
            LOGGER.debug("Sending HTTP GET request to motion daemon")
            httpRequestDefer = self.agent.request('GET', self.snapshotActionUrl)
            httpRequestDefer.addCallbacks(self.httpResponseCallback, self.httpResponseErrback)
        d = Deferred()
        self.addTimeout(d, 2)
        self.defers.append(d)
        return d

    def httpResponseCallback(self, ignored):
        # The HTTP response itself carries no data we need; the capture
        # completion is observed via inotify instead.
        LOGGER.debug("Received response from HTTP GET snapshot request to motion")

    def httpResponseErrback(self, failure):
        LOGGER.error("Error in HTTP GET snapshot request to motion")
        self.errbackDefers(failure)

    def errbackDefers(self, failure):
        """Fail all outstanding capture Deferreds that have not fired."""
        defers = self.defers
        self.defers = []
        for d in defers:
            if not d.called:
                d.errback(Failure())

    def notifyCallback(self, ignored, filepath, mask):
        """inotify callback: a new lastsnap link means a capture finished;
        resolve it and fire the waiting Deferreds."""
        LOGGER.debug("Notify event %s on %s" % (humanReadableMask(mask), filepath.basename()))
        if mask & IN_CREATE and filepath == self.lastCaptureLink:
            # Resolve the symlink to the real capture file name.
            capture = filepath.realpath().basename()
            LOGGER.info("New capture detected: %s" % capture)
            try:
                self.lastCaptureTime = self.extractDateTimeFromCaptureName(capture)
                self.lastCaptureName = capture
            except ValueError:
                # Name didn't parse: fail the waiters (this also empties
                # self.defers, so no callback happens below).
                self.errbackDefers(Failure())
            if self.defers:
                defers = self.defers
                self.defers = []
                for d in defers:
                    if not d.called:
                        d.callback(capture)

    def getLastCaptureTime(self):
        return self.lastCaptureTime

    def getLastCaptureName(self):
        return self.lastCaptureName

    def addTimeout(self, d, duration):
        """Cancel Deferred `d` unless it fires within `duration` seconds."""
        timeout = reactor.callLater(duration, d.cancel)

        def cancelTimeout(result):
            # Disarm the timer once the deferred fires either way.
            if timeout.active():
                timeout.cancel()
            return result

        d.addBoth(cancelTimeout)

    def extractDateTimeFromCaptureName(self, name):
        """Parse the timestamp out of a capture-<ts>-<n>.jpg style name.
        Raises ValueError when the name does not match the format."""
        datetimeStr = name.split('-',1)[-1].rsplit('-',1)[0]
        return datetime.strptime(datetimeStr, TreatCam.CAPTURE_DATETIME_FORMAT)

    def findPreExistingLastCapture(self):
        """At startup, adopt the newest existing capture file (if any) as
        the last capture."""
        captures = sorted(self.capturePath.globChildren(TreatCam.CAPTURE_GLOB))
        if captures:
            lastCapturePath = captures[-1]
            name = lastCapturePath.basename()
            try:
                self.lastCaptureTime = self.extractDateTimeFromCaptureName(name)
                self.lastCaptureName = name
                LOGGER.info("Recovering %s at startup as last capture file" % self.lastCaptureName)
            except ValueError:
                LOGGER.exception("Unable to determine last capture file")
                pass

    def trimExcessCaptureFiles(self):
        """Delete the oldest capture files beyond config.capturesToRetain."""
        captures = sorted(self.capturePath.globChildren(TreatCam.CAPTURE_GLOB))
        excessCaptures = len(captures) - self.config.capturesToRetain
        if (excessCaptures > 0):
            for i in range(excessCaptures):
                LOGGER.info("Trimming: %s" % captures[i].basename())
                captures[i].remove()