def test_existing_directories_are_not_reported(self):
    import os
    from fsevents import Stream, Observer
    events = []

    def callback(event):
        events.append(event)

    stream = Stream(callback, self.tempdir, file_events=True)
    new1 = os.path.join(self.tempdir, "newdir1")
    new2 = os.path.join(self.tempdir, "newdir2")
    os.mkdir(new1)

    observer = Observer()
    observer.schedule(stream)
    observer.start()

    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(1)

    os.mkdir(new2)
    try:
        time.sleep(1.1)
        observer.stop()
        observer.unschedule(stream)
        observer.join()

        from fsevents import IN_CREATE
        self.assertEqual(len(events), 1)
        self.assertEqual(events[0].mask, IN_CREATE)
        self.assertEqual(events[0].name, os.path.realpath(new2))
    finally:
        os.rmdir(new1)
        os.rmdir(new2)

def test_single_file_added_multiple_streams(self):
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream1 = Stream(callback, path)
    stream2 = Stream(callback, path)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream1)
    observer.schedule(stream2)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream1)
    observer.unschedule(stream2)
    observer.join()
    self.assertEquals(events, [(path, 0), (path, 0)])

def test_start_then_watch(self):
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream = Stream(callback, path)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(events, [(path, self.create_and_remove_mask)])

def _assert_action_after_watcher(self, process_asap, assertions_cb):
    events = []

    def callback(event):
        events.append(event)

    import os
    import time
    from fsevents import Stream
    from fsevents import Observer
    observer = Observer(process_asap=process_asap)
    observer.start()
    stream = Stream(callback, self.tempdir, file_events=True)
    observer.schedule(stream)

    # add single file
    del events[:]
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)
    assertions_cb(events, f)

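# A minimal sketch of how the helper above might be used; the test name and
# the assertion body are illustrative only (not taken from the original
# suite) and assume that file events expose a ``name`` attribute, as in the
# other file_events tests in this section.
def test_file_created_reported_without_process_asap(self):
    import os

    def assertions(events, f):
        # at least one event should refer to the file the helper created
        self.assertTrue(
            any(e.name == os.path.realpath(f.name) for e in events))

    self._assert_action_after_watcher(False, assertions)
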
def test_single_file_added_multiple_streams(self):
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream1 = Stream(callback, path)
    stream2 = Stream(callback, path)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream1)
    observer.schedule(stream2)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream1)
    observer.unschedule(stream2)
    observer.join()
    self.assertEquals(events[0][0], path)
    self.assertEquals(events[1][0], path)

def test_single_file_added(self):
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Observer
    observer = Observer(callback, file_events=True)
    observer.start()
    observer.schedule(path)

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(1.1)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEquals(events[0][0], path)

def test_start_no_watch(self):
    events = []

    def callback(*args):
        events.append(args)

    from fsevents import Observer
    observer = Observer()
    f, path = self._make_temporary()
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEqual(events, [])

def run():
    observer = Observer()
    observer.start()

    # start foreman
    forerunner = Forerunner()
    forerunner.start()

    # call callback when files under the cwd change
    stream = Stream(forerunner.callback, './', file_events=True)
    observer.schedule(stream)

    while True:
        try:
            sleep(1)
        except KeyboardInterrupt:
            observer.unschedule(stream)
            observer.stop()
            observer.join()

            foreman = forerunner.foreman
            for i in range(1, 10):
                if foreman.poll() is not None:
                    break
                print "sleeping... %i" % i
                sleep(1)
            if foreman.poll() is None:
                foreman.kill()
                sleep(1)
            break

def test_single_file_added_with_observer_unscheduled(self):
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Stream
    stream = Stream(callback, path)

    from fsevents import Observer
    observer = Observer()
    observer.start()

    import time
    while not observer.isAlive():
        time.sleep(0.1)

    observer.schedule(stream)
    observer.unschedule(stream)

    # add single file
    del events[:]
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEqual(events, [])

def test_since_stream(self):
    events = []

    def callback(*args):
        events.append(args)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]

    from fsevents import Stream, FS_FLAGHISTORYDONE
    stream = Stream(callback, path1, ids=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # create one file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0][:-1], (path1, self.create_and_remove_mask))

    # create a second file
    g.close()
    stream = Stream(callback, path1, since=events[0][2])
    del events[:]

    # new observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    self.assertEqual(len(events), 2)
    # FIXME: why do events arrive here in reversed order?
    self.assertEqual(events[1], (path1, self.create_and_remove_mask))
    self.assertEqual(events[0], (path1[:-1], FS_FLAGHISTORYDONE))

def test_file_moved_to_not_watched_path(self):
    import os
    events = []

    def callback(event):
        events.append(event)

    from fsevents import Stream
    not_watched_path = os.path.realpath(self._make_tempdir()) + '/'
    stream = Stream(callback, self.tempdir, file_events=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)

    src_name = os.path.join(self.tempdir, "test")
    dst_name = os.path.join(not_watched_path, "test")
    f = open(src_name, "w")
    f.write("abc")
    f.close()
    time.sleep(1.0)
    os.rename(src_name, dst_name)
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(dst_name)
    os.rmdir(not_watched_path)

    from fsevents import IN_CREATE, IN_MODIFY, IN_DELETE
    self.assertEquals(len(events), 3)

    # assert the creation events
    self.assertEquals(events[0].mask, IN_CREATE)
    self.assertEquals(events[0].name, os.path.realpath(src_name))
    self.assertEquals(events[1].mask, IN_MODIFY)
    self.assertEquals(events[1].name, os.path.realpath(src_name))
    self.assertEquals(events[2].mask, IN_DELETE)
    self.assertEquals(events[2].name, os.path.realpath(src_name))

def test_multiple_files_added(self):
    events = []

    def callback(*args):
        events.append(args)

    from fsevents import Observer
    observer = Observer()
    from fsevents import Stream
    observer.start()

    # wait until activation
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]

    # one file in a separate directory
    path2 = os.path.realpath(self._make_tempdir()) + '/'
    h = self._make_temporary(path2)[0]

    stream = Stream(callback, path1, path2)
    observer.schedule(stream)

    try:
        del events[:]
        f.close()
        g.close()
        h.close()
        time.sleep(0.2)
        self.assertEqual(
            sorted(events),
            sorted([(path1, self.create_and_remove_mask),
                    (path2, self.create_and_remove_mask)]))
    finally:
        f.close()
        g.close()
        h.close()
        os.rmdir(path1)
        os.rmdir(path2)

        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()

def test_multiple_files_added(self):
    events = []

    def callback(*args):
        events.append(args)

    from fsevents import Observer
    observer = Observer(callback, file_events=True)
    observer.start()

    # wait until activation
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    time.sleep(0.1)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]

    # one file in a separate directory
    path2 = os.path.realpath(self._make_tempdir()) + '/'
    h = self._make_temporary(path2)[0]

    observer.schedule(path1)
    observer.schedule(path2)

    try:
        del events[:]
        f.close()
        g.close()
        h.close()
        time.sleep(0.2)
        events = [e[0] for e in events]
        self.assertEqual(sorted(events), sorted([path1, path2]))
    finally:
        f.close()
        g.close()
        h.close()
        os.rmdir(path1)
        os.rmdir(path2)

        # stop and join observer
        observer.stop()
        observer.unschedule(path1)
        observer.unschedule(path2)
        observer.join()

def test_single_file_created_and_modified(self):
    events = []

    def callback(event):
        events.append(event)

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)

    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    time.sleep(1.0)
    f.write("def")
    f.flush()
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)

    from fsevents import IN_CREATE, IN_MODIFY
    self.assertEquals(len(events), 3)

    # assert events related to the creation
    self.assertEquals(events[0].mask, IN_CREATE)
    self.assertEquals(events[0].name, os.path.realpath(f.name))
    self.assertEquals(events[1].mask, IN_MODIFY)
    self.assertEquals(events[1].name, os.path.realpath(f.name))

    # assert events related to the modify
    self.assertEquals(events[2].mask, IN_MODIFY)
    self.assertEquals(events[2].name, os.path.realpath(f.name))

def main():
    path = os.path.expanduser('~')
    enemy_name = '.DS_Store'
    observer = Observer()

    def callback(path, mask):
        try:
            full_path = os.path.join(path, enemy_name)
            os.remove(full_path)
        except:
            pass

    stream = Stream(callback, path)
    observer.schedule(stream)
    observer.start()
    subprocess.call(['find', path, '-name', '\\' + enemy_name, '-delete'])
    observer.join()

def test_single_file_created_and_modified(self):
    events = []

    def callback(event):
        events.append(event)

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)

    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    time.sleep(1.0)
    f.write("def")
    f.flush()
    f.close()
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(f.name)

    from fsevents import IN_CREATE, IN_MODIFY
    self.assertEqual(len(events), 2)
    self.assertEqual(events[0].mask, IN_CREATE)
    self.assertEqual(events[0].name, os.path.realpath(f.name))
    self.assertEqual(events[1].mask, IN_MODIFY)
    self.assertEqual(events[1].name, os.path.realpath(f.name))

class MtimeFileWatcher(object):
    SUPPORTS_MULTIPLE_DIRECTORIES = True

    def __init__(self, directories, **kwargs):
        self._changes = _changes = []

        # Path to current module
        module_dir = directories[0]

        # Paths to watch
        paths = [module_dir]

        # Explicitly adding paths outside of module dir.
        for path in NON_MODULE_DIRS:
            paths.append(path)

        self.observer = Observer()

        def callback(event, mask=None):
            # Get extension
            try:
                ext = os.path.splitext(event.name)[1]
            except IndexError:
                ext = None
            # Add to changes if we're watching a file with this extension.
            if ext in WATCHED_EXTENSIONS:
                _changes.append(event.name)

        self.stream = Stream(callback, file_events=True, *paths)

    def start(self):
        self.observer.schedule(self.stream)
        self.observer.start()

    def changes(self, timeout=None):
        time.sleep(0.1)
        changed = set(self._changes)
        del self._changes[:]
        return changed

    def quit(self):
        self.observer.unschedule(self.stream)
        self.observer.stop()
        self.observer.join()

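# A hypothetical usage sketch for the watcher above. NON_MODULE_DIRS and
# WATCHED_EXTENSIONS are assumed to be module-level settings that are not
# shown in this snippet, and the directory path below is illustrative only.
if __name__ == '__main__':
    watcher = MtimeFileWatcher(['/path/to/module'])
    watcher.start()
    try:
        while True:
            changed = watcher.changes()
            if changed:
                print('changed: %s' % ', '.join(sorted(changed)))
            time.sleep(1)
    except KeyboardInterrupt:
        watcher.quit()
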
def test_single_file_added_to_subdirectory(self):
    events = []

    def callback(*args):
        events.append(args)

    import os
    directory = self._make_tempdir()
    subdirectory = os.path.realpath(os.path.join(directory, 'subdir')) + '/'
    os.mkdir(subdirectory)

    import time
    time.sleep(0.1)
    try:
        from fsevents import Stream
        stream = Stream(callback, directory)

        from fsevents import Observer
        observer = Observer()
        observer.schedule(stream)
        observer.start()

        # add single file
        while not observer.isAlive():
            time.sleep(0.1)
        del events[:]
        f = open(os.path.join(subdirectory, "test"), "w")
        f.write("abc")
        f.close()
        time.sleep(0.2)

        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()
        self.assertEqual(len(events), 1)
        self.assertEqual(events, [(subdirectory, self.modified_mask)])
    finally:
        os.unlink(f.name)
        os.rmdir(subdirectory)
        os.rmdir(directory)

def test_single_file_moved(self):
    events = []

    def callback(event):
        events.append(event)

    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    f.close()

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)

    new = "%s.new" % f.name
    os.rename(f.name, new)
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()
    os.unlink(new)

    from fsevents import IN_MOVED_FROM
    from fsevents import IN_MOVED_TO
    self.assertEqual(len(events), 2)
    self.assertEqual(events[0].mask, IN_MOVED_FROM)
    self.assertEqual(events[0].name, os.path.realpath(f.name))
    self.assertEqual(events[1].mask, IN_MOVED_TO)
    self.assertEqual(events[1].name, os.path.realpath(new))
    self.assertEqual(events[0].cookie, events[1].cookie)

def test_single_directory_deleted(self):
    events = []

    def callback(event):
        events.append(event)

    import os
    new1 = os.path.join(self.tempdir, "newdir1")
    new2 = os.path.join(self.tempdir, "newdir2")
    try:
        os.mkdir(new1)
        os.mkdir(new2)

        import time
        time.sleep(0.2)

        from fsevents import Stream
        stream = Stream(callback, self.tempdir, file_events=True)

        from fsevents import Observer
        observer = Observer()
        observer.schedule(stream)
        observer.start()

        # add single file
        import time
        while not observer.isAlive():
            time.sleep(0.1)
        del events[:]
        time.sleep(0.1)

        os.rmdir(new2)
        time.sleep(1.0)

        # stop and join observer
        observer.stop()
        observer.unschedule(stream)
        observer.join()

        from fsevents import IN_DELETE
        self.assertEqual(len(events), 1)
        self.assertEqual(events[0].mask, IN_DELETE)
        self.assertEqual(events[0].name, os.path.realpath(new2))
    finally:
        os.rmdir(new1)

def test_fileevent_stream(self):
    events = []

    def callback(*args):
        events.append(args)

    # two files in same directory
    import os
    path1 = os.path.realpath(self._make_tempdir()) + '/'
    f = self._make_temporary(path1)[0]
    g = self._make_temporary(path1)[0]

    from fsevents import Stream, FS_CFLAGFILEEVENTS, FS_ITEMISDIR
    stream = Stream(callback, path1, flags=FS_CFLAGFILEEVENTS)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # create two files (here in the same directory)
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    g.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()

    import os
    self.assertEqual(len(events), 3)
    self.assertEqual(events, [
        (path1[:-1], self.create_and_remove_mask | FS_ITEMISDIR),
        (f.name, self.create_and_remove_mask),
        (g.name, self.create_and_remove_mask)])

def test_existing_directories_are_not_reported(self):
    import os
    from fsevents import Observer
    import time
    events = []

    def callback(*args):
        events.append(args)

    new1 = os.path.join(self.tempdir, "newdir1")
    new2 = os.path.join(self.tempdir, "newdir2")
    os.mkdir(new1)
    time.sleep(1.0)

    observer = Observer(callback, file_events=True)
    observer.start()
    observer.schedule(self.tempdir)

    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(1)

    os.mkdir(new2)
    try:
        time.sleep(1.1)
        observer.stop()
        observer.unschedule(self.tempdir)
        observer.join()

        from fsevents import FSE_CREATED_FLAG
        from fsevents import FSE_ITEM_IS_DIR_FLAG
        flags = FSE_CREATED_FLAG | FSE_ITEM_IS_DIR_FLAG
        self.assertEquals(len(events), 1)
        self.assertEquals(events[0][0], os.path.realpath(new2))
        self.assertEquals(events[0][1], flags)
    finally:
        os.rmdir(new1)
        os.rmdir(new2)

def test_single_file_deleted(self):
    events = []

    def callback(event):
        events.append(event)

    import os
    f = open(os.path.join(self.tempdir, "test"), "w")
    f.write("abc")
    f.flush()
    f.close()

    from fsevents import Stream
    stream = Stream(callback, self.tempdir, file_events=True)

    from fsevents import Observer
    observer = Observer()
    observer.schedule(stream)
    observer.start()

    # add single file
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    time.sleep(2.1)

    os.unlink(f.name)
    time.sleep(0.1)

    # stop and join observer
    observer.stop()
    observer.unschedule(stream)
    observer.join()

    from fsevents import IN_DELETE
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0].mask, IN_DELETE)
    self.assertEqual(events[0].name, os.path.realpath(f.name))

    syncer.lock()
    syncer.sync(config["path_source"], args.from_server)
    syncer.release()
else:
    logging.critical("--init needs --from-server or --from-local")
    syncer.cleanup(True)
    sys.exit(5)

# CTRL+Z will force a full sync:
signal.signal(signal.SIGTSTP, syncer.sig_handler)

observer.start()
logging.info("------- FS WATCHING %s -------" % config["path_source"])
logging.info("(CTRL+z to force a full sync)")
logging.debug("Interval %ssec", args.interval)

os.chdir(config["path_source"])
stream = Stream(syncer.callback, config["path_source"])
try:
    observer.schedule(stream)
    observer.join()
    logging.info("Schedule finished")
except KeyboardInterrupt:
    logging.warning("CTRL+c")
finally:
    logging.info("Stopping observer...")
    observer.stop()
    logging.debug("Cleanup threads ...")
    syncer.cleanup(True)
    logging.info("Finished.")

class EventTriggerManager(object):

    def __init__(self, triggers, queue_execution_wait=0.2):
        self.triggers = triggers
        self.observer = Observer()
        self.last_execution_stats = None
        self.firing_queue = []
        self.firing_wait = queue_execution_wait
        self.firing_queue_thread = Timer(self.firing_wait,
                                         self.execute_firing_queue)
        self.is_executing_firing_queue = False

    def queue_firing_trigger(self, trigger):
        if trigger not in self.firing_queue:
            log.debug("adding %s to firing_queue" % trigger)
            self.firing_queue.insert(0, trigger)

        if (not self.is_executing_firing_queue
                and self.firing_queue_thread.is_alive()):
            log.debug("received another queue request, canceling timer")
            self.firing_queue_thread.cancel()
            self.firing_queue_thread.join()

        if not self.firing_queue_thread.is_alive():
            self.firing_queue_thread = Timer(self.firing_wait,
                                             self.execute_firing_queue)
            log.debug("starting new timer")
            self.firing_queue_thread.start()

    def execute_firing_queue(self):
        log.debug("executing firing queue")
        if self.is_executing_firing_queue:
            log.error("execution in progress")
            return
        self.is_executing_firing_queue = True
        while len(self.firing_queue):
            pt = self.firing_queue.pop()
            stat = ProcExecutionStats(command=pt.command)
            pt.fire()
            stat.execution_completed()
            self.last_execution_stats = stat
        self.is_executing_firing_queue = False

    def start(self, prefire=True):
        self.observer.start()
        for pt in self.triggers:
            log.debug("scheduling stream: %s" % pt)
            pt.schedule_execution = self.queue_firing_trigger
            self.observer.schedule(pt.stream)
        if prefire:
            for pt in self.triggers:
                self.queue_firing_trigger(pt)

    def stop(self):
        for pt in self.triggers:
            self.observer.unschedule(pt.stream)
            # remove pending triggers from queue. This must be done
            # after unscheduling the stream but before killing a
            # potential running process to avoid a race condition.
            self.firing_queue = []
            # kill any process being run by the trigger now that it
            # cannot be rescheduled
            pt.killfire()
        self.observer.stop()
        self.observer.join()

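# A minimal sketch of driving the manager above; ``watch_forever`` is an
# illustrative helper, not part of the original code. It assumes ``triggers``
# is a list of objects exposing the interface the methods above rely on
# (``stream``, ``command``, ``fire()``, ``killfire()`` and a writable
# ``schedule_execution`` attribute); building such trigger objects is not
# shown in this snippet.
def watch_forever(triggers):
    import time
    manager = EventTriggerManager(triggers, queue_execution_wait=0.5)
    manager.start(prefire=True)
    try:
        # block until interrupted; the observer and timers run in
        # background threads
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        manager.stop()
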