def main(): observer = Observer() observer.start() path = '/Users/seungjin/Desktop' def callback(event): #print "mask: " + str(event.mask) #print "cookie: " + str(event.cookie) #print "name: " + str(event.name) print event if event.mask == 256: #looks like new file comming newFile(str(event.name)) elif event.mask == 512: #looks line file deleted rmFile(str(event.name)) elif event.mask == 2: #looks like overwriting? print "hihihihi" def newFile(filename): print "new file is comming" #pushing this file into cloud gs = Googlestorage() #print gs.list_objects() gs.upload_objects(filename) def rmFile(filename): #print "%s is removed" % filename gs = Googlestorage( ) # this is evil.. do i need to make global goolgestorage object??? idk gs.delete_objects_and_buckets(filename) stream = Stream(callback, path, file_events=True) observer.schedule(stream)
def run(): observer = Observer() observer.start() # start foreman forerunner = Forerunner() forerunner.start() # call callback when files under the cwd change stream = Stream(forerunner.callback, './', file_events=True) observer.schedule(stream) while True: try: sleep(1) except KeyboardInterrupt: observer.unschedule(stream) observer.stop() observer.join() foreman = forerunner.foreman for i in range(1, 10): if foreman.poll() is not None: break print "sleeping... %i" % i sleep(1) if foreman.poll() is None: foreman.kill() sleep(1) break
def callback(event):
    """fsevents callback: pause watching, run a git sync, then re-arm."""
    global observer, git_sync
    if event.mask == kFSEventStreamEventFlagItemCreated:
        # Sublime Text seems to trigger a lot of creation events that do
        # not warrant a new sync, so skip these for now.
        return
    filename = event.name
    git_dir = os.path.join(config['local_path'], '.git')
    if git_dir in filename:
        # Changes inside the .git directory do not need a sync.
        return
    if observer:
        # Stop observing while syncing so our own writes do not retrigger us.
        observer.unschedule(stream)
        observer.stop()
    git_sync.run_sync()
    # Start observing again with a fresh observer.
    observer = Observer()
    observer.schedule(stream)
    observer.start()
def _loop_darwin(self):  # pragma: no cover
    """loop implementation for darwin platform"""
    from fsevents import Observer  #pylint: disable=F0401
    from fsevents import Stream  #pylint: disable=F0401
    from fsevents import IN_MODIFY  #pylint: disable=F0401

    observer = Observer()
    handle = self._handle

    def fsevent_callback(event):
        # Only modification events are forwarded to the handler.
        if event.mask == IN_MODIFY:
            handle(event)

    # One stream per watched directory, all on the same observer.
    for watched in self.watch_dirs:
        observer.schedule(Stream(fsevent_callback, watched, file_events=True))

    observer.daemon = True
    observer.start()
    try:
        # hack to keep main thread running...
        import time
        while True:
            time.sleep(99999)
    except (SystemExit, KeyboardInterrupt):
        pass
def test_single_file_added_with_observer_unscheduled(self):
    """An unscheduled stream must deliver no events at all."""
    captured = []

    def callback(*args):
        captured.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream = Stream(callback, path)
    from fsevents import Observer
    observer = Observer()
    observer.start()

    import time
    while not observer.isAlive():
        time.sleep(0.1)

    # Schedule and immediately unschedule before touching the file.
    observer.schedule(stream)
    observer.unschedule(stream)

    # add single file
    del captured[:]
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEqual(captured, [])
def test_single_file_added(self):
    """A file created under a scheduled path is reported with that path."""
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Observer
    observer = Observer(callback, file_events=True)
    observer.start()
    observer.schedule(path)

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(1.1)

    # stop and join observer
    observer.stop()
    observer.join()
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(events[0][0], path)
def main(): observer = Observer() observer.start() path = '/Users/seungjin/Desktop' def callback(event): #print "mask: " + str(event.mask) #print "cookie: " + str(event.cookie) #print "name: " + str(event.name) print event if event.mask == 256: #looks like new file comming newFile(str(event.name)) elif event.mask == 512: #looks line file deleted rmFile(str(event.name)) elif event.mask == 2: #looks like overwriting? print "hihihihi" def newFile(filename): print "new file is comming" #pushing this file into cloud gs = Googlestorage() #print gs.list_objects() gs.upload_objects(filename) def rmFile(filename): #print "%s is removed" % filename gs = Googlestorage() # this is evil.. do i need to make global goolgestorage object??? idk gs.delete_objects_and_buckets(filename) stream = Stream(callback,path,file_events=True) observer.schedule(stream)
def main():
    """Register an fs watch on project_path and idle until SIGTERM."""

    def sigterm_handler(_signo, _stack_frame):
        # Desktop notifications are best-effort; never let them break shutdown.
        try:
            Notifier.notify('Unregistering fs watch', title='LittleHelp',
                            subtitle=project_path)
        except:
            pass
        logging.info("Sigterm handler called")
        observer.unschedule(stream)
        observer.stop()
        observer.join()
        try:
            Notifier.notify('Unregistered fs watch', title='LittleHelp',
                            subtitle=project_path)
        except:
            pass
        sys.exit(0)

    try:
        Notifier.notify('Registering watch', title='LittleHelp',
                        subtitle=project_path)
        observer = Observer()
        stream = Stream(file_event_callback, project_path, file_events=True)
        observer.schedule(stream)
        observer.start()
        signal.signal(signal.SIGTERM, sigterm_handler)
        # Idle; all work happens on the observer thread or in the handler.
        while True:
            sleep(0.1)
    except:
        logging.exception("Unhandled exception")
def test_start_then_watch(self):
    """Scheduling a stream after start() must still deliver events."""
    captured = []

    def callback(*args):
        captured.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream = Stream(callback, path)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(captured, [(path, self.create_and_remove_mask)])
def test_existing_directories_are_not_reported(self):
    """Only directories created after start should produce events."""
    import os
    from fsevents import Stream, Observer
    captured = []

    def callback(event):
        captured.append(event)

    stream = Stream(callback, self.tempdir, file_events=True)
    new1 = os.path.join(self.tempdir, "newdir1")
    new2 = os.path.join(self.tempdir, "newdir2")
    # new1 exists before observation begins; it must not be reported.
    os.mkdir(new1)
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    time.sleep(1)
    os.mkdir(new2)
    try:
        time.sleep(1.1)
        observer.stop()
        observer.unschedule(stream)
        observer.join()
        from fsevents import IN_CREATE
        self.assertEqual(len(captured), 1)
        self.assertEqual(captured[0].mask, IN_CREATE)
        self.assertEqual(captured[0].name, os.path.realpath(new2))
    finally:
        os.rmdir(new1)
        os.rmdir(new2)
def test_single_file_added_multiple_streams(self):
    """Two streams on the same path each deliver the event once."""
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream1 = Stream(callback, path)
    stream2 = Stream(callback, path)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream1)
    observer.schedule(stream2)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream1)
    observer.unschedule(stream2)
    observer.join()
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(events, [(path, 0), (path, 0)])
def main(): arg_parser = argparse.ArgumentParser() arg_parser.add_argument('path') arg_parser.add_argument('-c', '--cmd', required=True, nargs='+', help='Console command for execute') arg_parser.add_argument('-d', '--delay', type=int, default=5, nargs='?', help='Synchronization delay in seconds') arg_parser.add_argument('-v', '--verbose', action='store_true', help='verbose flag') try: args = arg_parser.parse_args() path = args.path if not os.path.isdir(path): raise IOError('Path `%s` is not a directory' % path) event_handler = EventHandler() timer = TimerInterval(callback_func, args.delay, event_handler=event_handler, cmd=args.cmd, verbose=args.verbose) stream = Stream(event_handler.callback_handler, path, file_events=True) observer = Observer() observer.schedule(stream) observer.start() timer.start() except IOError as e: print e except KeyboardInterrupt: observer.unschedule(stream) observer.stop() timer.stop()
def _assert_action_after_watcher(self, process_asap, assertions_cb):
    """Create one file under observation, then run assertions_cb(events, f)."""
    events = []

    def callback(event):
        events.append(event)

    import os
    import time
    from fsevents import Stream
    from fsevents import Observer
    observer = Observer(process_asap=process_asap)
    observer.start()
    stream = Stream(callback, self.tempdir, file_events=True)
    observer.schedule(stream)

    # add single file
    del events[:]
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)
    assertions_cb(events, f)
def test_single_file_added_multiple_streams(self):
    """Two streams on the same path both report the created file's path."""
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream1 = Stream(callback, path)
    stream2 = Stream(callback, path)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream1)
    observer.schedule(stream2)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream1)
    observer.unschedule(stream2)
    observer.join()
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(events[0][0], path)
    self.assertEqual(events[1][0], path)
def test_start_no_watch(self):
    """With nothing scheduled, file changes produce no events."""
    captured = []

    def callback(*args):
        captured.append(args)

    from fsevents import Observer
    observer = Observer()
    f, path = self._make_temporary()
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEqual(captured, [])
class Dmon:
    """Directory monitor that fans create/delete/update events out to
    handler objects, using pyinotify on Linux and MacFSEvents on darwin."""

    def __init__(self, dir, handlers):
        self._handlers = handlers
        self._dir = dir

    def _call_handler(self, method, fname):
        # Invoke the named hook on every registered handler.
        for handler in self._handlers:
            getattr(handler, method)(fname)

    def _run_pyinotify(self):
        from lib import pyinotify

        class PyInotifyEventProcessor(pyinotify.ProcessEvent):
            def __init__(self, c_handler):
                self._call = c_handler

            def process_IN_CREATE(self, event):
                self._call('on_create', event.pathname)

            def process_IN_DELETE(self, event):
                self._call('on_delete', event.pathname)

            def process_IN_MODIFY(self, event):
                self._call('on_update', event.pathname)

        wm = pyinotify.WatchManager()
        self._observer = pyinotify.Notifier(
            wm, PyInotifyEventProcessor(self._call_handler))
        mask = pyinotify.ALL_EVENTS
        wm.add_watch(self._dir, mask, rec=True)
        # Poll/dispatch loop; pyinotify has no background thread here.
        while True:
            self._observer.process_events()
            if self._observer.check_events():
                self._observer.read_events()
            time.sleep(1)

    def _run_macfse(self):
        from lib import fsevents
        from fsevents import Stream
        from fsevents import Observer

        def macfse_callback(event):
            # Empirical fsevents masks: create (256/128), delete (512/64),
            # update (2).
            if event.mask in [256, 128]:
                self._call_handler('on_create', event.name)
            elif event.mask in [512, 64]:
                self._call_handler('on_delete', event.name)
            elif event.mask == 2:
                self._call_handler('on_update', event.name)

        self._observer = Observer()
        self._stream = Stream(macfse_callback, self._dir, file_events=True)
        self._observer.schedule(self._stream)
        self._observer.start()

    def start(self):
        platform = sys.platform.lower()
        if 'darwin' in platform:
            self._run_macfse()
        elif 'linux' in platform:
            self._run_pyinotify()

    def stop(self):
        platform = sys.platform.lower()
        if 'darwin' in platform:
            self._observer.unschedule(self._stream)
            self._observer.stop()
        elif 'linux' in platform:
            self._observer.stop()
def run(self):
    """Start an observer that delivers file events from self.path to self."""
    observer = Observer()
    observer.start()
    # This object itself serves as the event callback.
    stream = Stream(self, self.path, file_events=True)
    observer.schedule(stream)
def watch_files(paths):
    """Watch all given paths, dispatching file events to update_file."""
    global observer
    observer = Observer()
    stream = Stream(update_file, file_events=True, *paths)
    observer.schedule(stream)
    log.info("Starting observer")
    # Daemonize so the watcher never blocks interpreter shutdown.
    observer.daemon = True
    observer.start()
    log.info("Observer started")
def start_session():
    """Watch the cwd, dispatching events to the session callback, and block."""
    observer = Observer()
    observer.start()
    cb = create_cb()
    stream = Stream(cb, os.getcwd(), file_events=True)
    observer.schedule(stream)
    # BUG FIX: the observer thread was already started above; calling
    # observer.run() directly would execute the thread's event loop a second
    # time in this thread. join() blocks the caller without re-running it.
    observer.join()
def test_since_stream(self):
    """Events recorded with ids=True can be replayed via since=."""
    captured = []

    def callback(*args):
        captured.append(args)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    first_file = self._make_temporary(path1)[0]
    second_file = self._make_temporary(path1)[0]
    from fsevents import Stream, FS_FLAGHISTORYDONE
    stream = Stream(callback, path1, ids=True)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # create one file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    first_file.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(len(captured), 1)
    self.assertEqual(captured[0][:-1], (path1, self.create_and_remove_mask))

    # create a second file, then replay from the recorded event id
    second_file.close()
    stream = Stream(callback, path1, since=captured[0][2])
    del captured[:]

    # new observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(len(captured), 2)
    # FIXME: why do events arrive here in reversed order?
    self.assertEqual(captured[1], (path1, self.create_and_remove_mask))
    self.assertEqual(captured[0], (path1[:-1], FS_FLAGHISTORYDONE))
def watch_local_files(): print "Watching for changes to: {}".format(", ".join(TEMPLATES.keys())) observer = Observer() stream = Stream(event_callback, os.getcwd(), file_events=True) observer.schedule(stream) try: observer.start() while True: time.sleep(86400) except KeyboardInterrupt: observer.stop()
def main(): arg_parser = argparse.ArgumentParser() arg_parser.add_argument('path') arg_parser.add_argument('-c', '--cmd', required=True, nargs='+', help='Console command for execute') arg_parser.add_argument('-d', '--delay', type=int, default=5, nargs='?', help='Synchronization delay in seconds') arg_parser.add_argument('-f', '--files', action='store_true', help="show changed files snapshot") arg_parser.add_argument('-v', '--verbose', action='store_true', help='increase verbosity') try: args = arg_parser.parse_args() path = args.path if not os.path.isdir(path): raise IOError('Path `%s` is not a directory' % path) event_handler = EventHandler() callback_params = { 'event_handler': event_handler, 'cmd': args.cmd, 'verbose': args.verbose, 'show_files': args.files } timer = TimerInterval(callback_func, args.delay, **callback_params) stream = Stream(event_handler.callback_handler, path, file_events=True) observer = Observer() observer.schedule(stream) observer.start() timer.start() except IOError as e: print e except KeyboardInterrupt: observer.unschedule(stream) observer.stop() timer.stop()
def loop(self):
    """Run self.stream on a daemon observer until interrupted."""
    observer = Observer()
    observer.schedule(self.stream)
    observer.daemon = True
    observer.start()
    try:
        # Cannot use observer.join() as it is not interrupted by Ctrl-C.
        while True:
            time.sleep(10000)
    finally:
        observer.unschedule(self.stream)
        observer.stop()
def get_notified(path_regexps, project_path):
    """Get notified when files change, and minify them.
    """
    observer = Observer()
    observer.start()

    def file_changed(file_change_event):
        """Callback for when a file has changed"""
        changed = file_change_event.name
        if is_minifiable(changed) and needs_minifying(changed):
            tell_user_and_minify(changed)

    observer.schedule(Stream(file_changed, project_path, file_events=True))
def test_file_moved_to_not_watched_path(self):
    """Moving a file out of the watched tree reports create/modify/delete."""
    import os
    events = []

    def callback(event):
        events.append(event)

    from fsevents import Stream
    not_watched_path = os.path.realpath(self._make_tempdir()) + '/'
    stream = Stream(callback, self.tempdir, file_events=True)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)
    src_name = os.path.join(self.tempdir, "test")
    dst_name = os.path.join(not_watched_path, "test")
    f = open(src_name, "w")
    f.write("abc")
    f.close()
    time.sleep(1.0)
    os.rename(src_name, dst_name)
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(dst_name)
    os.rmdir(not_watched_path)
    from fsevents import IN_CREATE, IN_MODIFY, IN_DELETE
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(events), 3)
    # assert the creation events
    self.assertEqual(events[0].mask, IN_CREATE)
    self.assertEqual(events[0].name, os.path.realpath(src_name))
    self.assertEqual(events[1].mask, IN_MODIFY)
    self.assertEqual(events[1].name, os.path.realpath(src_name))
    # the move out of the watched tree shows up as a delete
    self.assertEqual(events[2].mask, IN_DELETE)
    self.assertEqual(events[2].name, os.path.realpath(src_name))
def test_multiple_files_added(self):
    """One stream over two directories reports events for both."""
    captured = []

    def callback(*args):
        captured.append(args)

    from fsevents import Observer
    observer = Observer()
    from fsevents import Stream
    observer.start()

    # wait until activation
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]
    # one file in a separate directory
    path2 = os.path.realpath(self._make_tempdir()) + '/'
    h = self._make_temporary(path2)[0]

    stream = Stream(callback, path1, path2)
    observer.schedule(stream)
    try:
        del captured[:]
        f.close()
        g.close()
        h.close()
        time.sleep(0.2)
        self.assertEqual(
            sorted(captured),
            sorted([(path1, self.create_and_remove_mask),
                    (path2, self.create_and_remove_mask)]))
    finally:
        f.close()
        g.close()
        h.close()
        os.rmdir(path1)
        os.rmdir(path2)
        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()
def main(): usage = 'usage: %prog [options] CALL_DIRECTORY' parser = OptionParser(usage=usage) parser.add_option('-p', '--port', dest='port', help='port to listen on') parser.add_option('-a', '--address', dest='address', help='address to listen on') (options, args) = parser.parse_args() if len(args) != 1: parser.print_help() sys.exit(1) call_definition_path = args[0] if options.port: port = int(options.port) else: port = PORT if options.address: ip_address = options.address else: ip_address = IP_ADDRESS # Monitor the call definition path to restart the # server if any of the files change, or new ones # are added observer = Observer() observer.start() stream = Stream(definition_change, call_definition_path) observer.schedule(stream) global call_handler call_handler = CallHandler(call_definition_path) server_class = BaseHTTPServer.HTTPServer global httpd httpd = server_class((ip_address, port), HttpRequestHandler) print 'WebServiceSimulator started' try: httpd.serve_forever() except KeyboardInterrupt: pass finally: print 'Shutting down web service simulator' httpd.server_close() sys.exit(0)
def start_scanner(path): """ watch for file events in the supplied path """ try: observer = Observer() observer.start() stream = Stream(file_modified, path, file_events=True) observer.schedule(stream) print "Watching for changes. Press Ctrl-C to stop." while 1: pass except (KeyboardInterrupt, OSError, IOError): observer.unschedule(stream) observer.stop()
def main():
    """Watch the cwd and dispatch events to callback until Ctrl-C."""
    logging.basicConfig(level=logging.INFO)
    load_config()
    observer = Observer()
    observer.setDaemon(True)
    observer.start()
    observer.schedule(Stream(callback, ".", file_events=True))
    try:
        # Idle loop; the daemon observer thread does the work.
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        pass
    finally:
        observer.stop()
        print("")
def test_multiple_files_added(self):
    """Observer-level scheduling of two paths reports events for both."""
    captured = []

    def callback(*args):
        captured.append(args)

    from fsevents import Observer
    observer = Observer(callback, file_events=True)
    observer.start()

    # wait until activation
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]
    # one file in a separate directory
    path2 = os.path.realpath(self._make_tempdir()) + '/'
    h = self._make_temporary(path2)[0]

    observer.schedule(path1)
    observer.schedule(path2)
    try:
        del captured[:]
        f.close()
        g.close()
        h.close()
        time.sleep(0.2)
        seen_paths = [e[0] for e in captured]
        self.assertEqual(sorted(seen_paths), sorted([path1, path2]))
    finally:
        f.close()
        g.close()
        h.close()
        os.rmdir(path1)
        os.rmdir(path2)
        # stop and join observer
        observer.stop()
        observer.unschedule(path1)
        observer.unschedule(path2)
        observer.join()
def test_single_file_created_and_modified(self):
    """A create plus two flushes yields CREATE, MODIFY, MODIFY."""
    events = []

    def callback(event):
        events.append(event)

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)
    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    time.sleep(1.0)
    f.write("def")
    f.flush()
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)
    from fsevents import IN_CREATE, IN_MODIFY
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(events), 3)
    # assert events related to the creations
    self.assertEqual(events[0].mask, IN_CREATE)
    self.assertEqual(events[0].name, os.path.realpath(f.name))
    self.assertEqual(events[1].mask, IN_MODIFY)
    self.assertEqual(events[1].name, os.path.realpath(f.name))
    # assert events related to the modify
    self.assertEqual(events[2].mask, IN_MODIFY)
    self.assertEqual(events[2].name, os.path.realpath(f.name))
class FSEventsScanner: """ Runs Ok, as long as you have MacFSEvents installed - but does not catch changes to the permissions nor uid/gid """ def __init__(self): self.paths = [] self.observer = Observer() self.stream = None def addPathToListenForChangesOn(self, thePath): print "added path:" + thePath self.paths.append(thePath) def startListening(self): self.observer.start() self.stream = Stream(callback, *self.paths, file_events=True) self.observer.schedule(self.stream)
def loop(self):
    """Run self.stream on a daemon observer, restarting on queue signals."""
    observer = Observer()
    observer.schedule(self.stream)
    observer.daemon = True
    observer.start()
    try:
        while True:
            try:
                # Cannot use a fully blocking get() as it is not
                # interrupted by Ctrl-C.
                # BUG FIX: get(10000) passed 10000 as the positional
                # `block` flag, not as a timeout, so the call blocked
                # forever and Empty could never be raised. Pass it as
                # timeout= to get the intended periodic wakeup.
                if self.wait_queue.get(timeout=10000) is True:
                    self.force_restart()
            except Empty:
                pass
    finally:
        observer.unschedule(self.stream)
        observer.stop()
def main():
    """Continuously delete .DS_Store files under the home directory."""
    home = os.path.expanduser('~')
    enemy_name = '.DS_Store'
    observer = Observer()

    def callback(path, mask):
        # Best effort: the file may already be gone when we get here.
        try:
            os.remove(os.path.join(path, enemy_name))
        except:
            pass

    observer.schedule(Stream(callback, home))
    observer.start()
    # Sweep any pre-existing .DS_Store files once at startup.
    subprocess.call(['find', home, '-name', '\\' + enemy_name, '-delete'])
    observer.join()
def main():
    """Watch a directory and refresh matching browser tabs on changes."""
    parser = argparse.ArgumentParser(description="Refresh browser tabs when local files change.")
    parser.add_argument("path", help="The directory to watch for changes.")
    parser.add_argument("keyword", help="Tabs with this keyword in their URL will be refreshed.")
    parser.add_argument("-b", "--browser", help="Which browser to refresh.",
                        choices=sorted(browser_reloaders.keys()), default="chrome")
    args = parser.parse_args()
    print("Watching {} for changes...".format(os.path.abspath(args.path)))
    event_callback_partial = partial(event_callback,
                                     browsers=(args.browser,),
                                     keyword=args.keyword)
    observer = Observer()
    # BUG FIX: the stream was built from sys.argv[1], which is wrong whenever
    # an option (e.g. -b) precedes the positional argument; use the parsed
    # args.path, consistent with the message printed above.
    stream = Stream(event_callback_partial, args.path, file_events=True)
    observer.schedule(stream)
    try:
        observer.start()
        while True:
            time.sleep(86400)
    except KeyboardInterrupt:
        observer.stop()
def main(): signal.signal(signal.SIGINT, signal_handler) parser = argparse.ArgumentParser(description='Sync files to a remote server of scp.') parser.add_argument('path', type=str, metavar=('local', 'remote'), nargs=2, help='path for the monitor') args = parser.parse_args() print args.path observer = Observer() stream = Stream(lambda x, y: callback(x,y, args.path[1]), args.path[0]) observer.schedule(stream) try: observer.start() while True: # instead of this infinite loop, you can do pass # whatever processing you wanted except KeyboardInterrupt: observer.stop()
class MtimeFileWatcher(object):
    """Collects changed-file paths (filtered by extension) via fsevents."""

    SUPPORTS_MULTIPLE_DIRECTORIES = True

    def __init__(self, directories, **kwargs):
        self._changes = _changes = []
        # Path to current module
        module_dir = directories[0]
        # Watch the module dir plus the explicit non-module paths.
        paths = [module_dir]
        for extra in NON_MODULE_DIRS:
            paths.append(extra)
        self.observer = Observer()

        def callback(event, mask=None):
            # Extract the extension; record only watched extensions.
            try:
                ext = os.path.splitext(event.name)[1]
            except IndexError:
                ext = None
            if ext in WATCHED_EXTENSIONS:
                _changes.append(event.name)

        self.stream = Stream(callback, file_events=True, *paths)

    def start(self):
        self.observer.schedule(self.stream)
        self.observer.start()

    def changes(self, timeout=None):
        # Give pending events a moment to arrive, then drain the buffer.
        time.sleep(0.1)
        changed = set(self._changes)
        del self._changes[:]
        return changed

    def quit(self):
        self.observer.unschedule(self.stream)
        self.observer.stop()
        self.observer.join()
def test_single_file_created_and_modified(self):
    """A create-plus-write sequence yields CREATE then MODIFY."""
    captured = []

    def callback(event):
        captured.append(event)

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    time.sleep(2.1)
    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    time.sleep(1.0)
    f.write("def")
    f.flush()
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)
    from fsevents import IN_CREATE, IN_MODIFY
    self.assertEqual(len(captured), 2)
    self.assertEqual(captured[0].mask, IN_CREATE)
    self.assertEqual(captured[0].name, os.path.realpath(f.name))
    self.assertEqual(captured[1].mask, IN_MODIFY)
    self.assertEqual(captured[1].name, os.path.realpath(f.name))
class Monitor: def __init__(self): self.obs = Observer() self.stream = Stream(self.callback, SOURCE_PATH, file_events=True) def callback(self, event): #file event callback function if event.name.endswith(VALID_SUFFIX) and os.path.isfile(event.name): sc.upload_track(event.name) def start(self): print "Starting monitor..." self.obs.start() self.obs.schedule(self.stream) def stop(self): print "Stopping monitor..." self.obs.unschedule(self.stream) self.obs.stop()
def test_single_file_added_to_subdirectory(self):
    """A file created in a subdir reports the subdir as modified."""
    captured = []

    def callback(*args):
        captured.append(args)

    import os
    directory = self._make_tempdir()
    subdirectory = os.path.realpath(os.path.join(directory, 'subdir')) + '/'
    os.mkdir(subdirectory)
    import time
    time.sleep(0.1)
    try:
        from fsevents import Stream
        stream = Stream(callback, directory)
        from fsevents import Observer
        observer = Observer()
        observer.schedule(stream)
        observer.start()

        # add single file
        while not observer.isAlive():
            time.sleep(0.1)
        del captured[:]
        f = open(os.path.join(subdirectory, "test"), "w")
        f.write("abc")
        f.close()
        time.sleep(0.2)

        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()
        self.assertEqual(len(captured), 1)
        self.assertEqual(captured, [(subdirectory, self.modified_mask)])
    finally:
        os.unlink(f.name)
        os.rmdir(subdirectory)
        os.rmdir(directory)
def test_single_file_moved(self):
    """A rename yields paired MOVED_FROM/MOVED_TO events sharing a cookie."""
    captured = []

    def callback(event):
        captured.append(event)

    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    f.close()
    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    time.sleep(2.1)
    new = "%s.new" % f.name
    os.rename(f.name, new)
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(new)
    from fsevents import IN_MOVED_FROM
    from fsevents import IN_MOVED_TO
    self.assertEqual(len(captured), 2)
    self.assertEqual(captured[0].mask, IN_MOVED_FROM)
    self.assertEqual(captured[0].name, os.path.realpath(f.name))
    self.assertEqual(captured[1].mask, IN_MOVED_TO)
    self.assertEqual(captured[1].name, os.path.realpath(new))
    # both halves of the move carry the same cookie
    self.assertEqual(captured[0].cookie, captured[1].cookie)
def test_single_directory_deleted(self):
    """Removing one of two pre-existing dirs yields one IN_DELETE event."""
    captured = []

    def callback(event):
        captured.append(event)

    import os
    new1 = os.path.join(self.tempdir, "newdir1")
    new2 = os.path.join(self.tempdir, "newdir2")
    try:
        os.mkdir(new1)
        os.mkdir(new2)
        import time
        time.sleep(0.2)
        from fsevents import Stream
        stream = Stream(callback, self.tempdir, file_events=True)
        from fsevents import Observer
        observer = Observer()
        observer.schedule(stream)
        observer.start()

        # wait for the observer, then delete one directory
        while not observer.isAlive():
            time.sleep(0.1)
        del captured[:]
        time.sleep(0.1)
        os.rmdir(new2)
        time.sleep(1.0)

        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()
        from fsevents import IN_DELETE
        self.assertEqual(len(captured), 1)
        self.assertEqual(captured[0].mask, IN_DELETE)
        self.assertEqual(captured[0].name, os.path.realpath(new2))
    finally:
        os.rmdir(new1)
class FileSystemNotifier(object):
    """Watches target_dirs and forwards change notifications to a callback."""

    def __init__(self, target_dirs, callback):
        self.target_dirs = target_dirs
        self.callback = callback

    def start(self):
        self.stream = Stream(self.directory_changed, *self.target_dirs)
        self.observer = Observer()
        self.observer.schedule(self.stream)
        # Daemonize so the observer dies with the main thread; start() runs
        # it on its own thread.
        self.observer.daemon = True
        self.observer.start()

    def directory_changed(self, subpath, mask):
        logging.debug('Directory changed: %s, %s' % (subpath, mask))
        self.callback(subpath, mask)

    def shutdown(self):
        self.observer.unschedule(self.stream)
        self.observer.stop()
def main(): signal.signal(signal.SIGINT, signal_handler) parser = argparse.ArgumentParser( description='Sync files to a remote server of scp.') parser.add_argument('path', type=str, metavar=('local', 'remote'), nargs=2, help='path for the monitor') args = parser.parse_args() print args.path observer = Observer() stream = Stream(lambda x, y: callback(x, y, args.path[1]), args.path[0]) observer.schedule(stream) try: observer.start() while True: # instead of this infinite loop, you can do pass # whatever processing you wanted except KeyboardInterrupt: observer.stop()
def test_fileevent_stream(self):
    """FS_CFLAGFILEEVENTS reports the directory plus each file event."""
    captured = []

    def callback(*args):
        captured.append(args)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    first_file = self._make_temporary(path1)[0]
    second_file = self._make_temporary(path1)[0]
    from fsevents import Stream, FS_CFLAGFILEEVENTS, FS_ITEMISDIR
    stream = Stream(callback, path1, flags=FS_CFLAGFILEEVENTS)
    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # create two files (here in the same directory)
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del captured[:]
    first_file.close()
    second_file.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    import os
    self.assertEqual(len(captured), 3)
    self.assertEqual(
        captured,
        [(path1[:-1], self.create_and_remove_mask | FS_ITEMISDIR),
         (first_file.name, self.create_and_remove_mask),
         (second_file.name, self.create_and_remove_mask)])
class FSEventsListener(object):
    """fsevents-based listener reporting added/changed/deleted paths via f."""

    def __init__(self, path, f, ignore=None):
        logging.info("Using FSEvents")
        self.path = path
        self.f = f
        self.ignore = ignore
        self.observer = Observer()
        self.observer.daemon = True
        self._streams = []
        self._streams.append(createStream(self.path, path, self._update))
        self._streamed_folders = [self.path]

        def add_stream(p):
            # One stream per real folder; skip duplicates.
            if p in self._streamed_folders:
                return
            # NOTE(review): `file_path` here is the enclosing loop variable
            # below, resolved at call time — works, but fragile; confirm.
            self._streams.append(createStream(p, file_path, self._update))
            self._streamed_folders.append(p)

        # Start an extra listener for all symlinks
        for file_path in fileList(self.path, folders=True):
            if os.path.islink(file_path):
                if os.path.isdir(file_path):
                    add_stream(os.path.realpath(file_path))
                else:
                    add_stream(os.path.dirname(os.path.realpath(file_path)))

    def run(self):
        self.resume()
        self.observer.start()

    def pause(self):
        logging.debug("MacListener.PAUSE")
        for stream in self._streams:
            self.observer.unschedule(stream)

    def resume(self):
        logging.debug("MacListener.RESUME")
        for stream in self._streams:
            self.observer.schedule(stream)

    def stop(self):
        # BUG FIX: `stop` was declared without `self`, so any
        # listener.stop() call raised TypeError and `self` inside the body
        # was an unresolved name.
        self.observer.stop()

    def _update(self, event):
        path = event.name
        if self.ignore and self.ignore(path):
            return
        logging.debug("MacListener.update %s", event)
        result = {
            'added': [],
            'deleted': [],
            'changed': [],
        }
        if os.path.exists(path):
            # Heuristic: paths created within the last second are "added";
            # anything older that still exists is "changed".
            seconds_since_created = int(time.time()) - get_creation_time(
                os.path.realpath(path))
            if seconds_since_created < 1.0:
                result["added"].append(path)
            else:
                result["changed"].append(path)
        else:
            result["deleted"].append(path)
        self.f(result)