def start(self):
    """Begin watching the target directories on a daemonized observer thread."""
    self.observer = Observer()
    self.stream = Stream(self.directory_changed, *self.target_dirs)
    self.observer.schedule(self.stream)
    # A daemon thread is killed automatically when the main thread exits.
    self.observer.daemon = True
    self.observer.start()
def callback(event):
    """Handle a filesystem event by re-running the git sync.

    The watcher is stopped while the sync runs (so our own writes do not
    re-trigger it) and restarted afterwards.
    """
    global observer, git_sync
    if event.mask == kFSEventStreamEventFlagItemCreated:
        # Sublime Text seems to trigger a lot of these and they don't seem
        # to warrant a new sync, so skip them for now.
        return
    filename = event.name
    git_dir = os.path.join(config['local_path'], '.git')
    # Skip changes inside the .git directory itself.  BUG FIX: the original
    # used `git_dir in filename`, a substring test that would also skip any
    # unrelated path merely containing the .git path text; use a proper
    # path-prefix test instead.
    if filename == git_dir or filename.startswith(git_dir + os.sep):
        return
    if observer:
        # Stop observing while syncing.
        observer.unschedule(stream)
        observer.stop()
    git_sync.run_sync()
    # Start observing again.
    observer = Observer()
    observer.schedule(stream)
    observer.start()
def test_single_file_added(self):
    """An event for the single watched file should be reported first."""
    events = []

    def callback(*args):
        events.append(args)

    f, path = self._make_temporary()
    from fsevents import Observer
    observer = Observer(callback, file_events=True)
    observer.start()
    observer.schedule(path)

    # Wait until the observer thread is actually running before acting.
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(1.1)

    # stop and join observer
    observer.stop()
    observer.join()
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(events[0][0], path)
def main(): observer = Observer() observer.start() path = '/Users/seungjin/Desktop' def callback(event): #print "mask: " + str(event.mask) #print "cookie: " + str(event.cookie) #print "name: " + str(event.name) print event if event.mask == 256: #looks like new file comming newFile(str(event.name)) elif event.mask == 512: #looks line file deleted rmFile(str(event.name)) elif event.mask == 2: #looks like overwriting? print "hihihihi" def newFile(filename): print "new file is comming" #pushing this file into cloud gs = Googlestorage() #print gs.list_objects() gs.upload_objects(filename) def rmFile(filename): #print "%s is removed" % filename gs = Googlestorage() # this is evil.. do i need to make global goolgestorage object??? idk gs.delete_objects_and_buckets(filename) stream = Stream(callback,path,file_events=True) observer.schedule(stream)
def __init__(self, path, f, ignore=None):
    """Watch *path* with FSEvents, adding extra streams behind symlinks."""
    logging.info("Using FSEvents")
    self.path = path
    self.f = f
    self.ignore = ignore
    self.observer = Observer()
    self.observer.daemon = True
    self._streams = [createStream(self.path, path, self._update)]
    self._streamed_folders = [self.path]

    def add_stream(p):
        if p in self._streamed_folders:
            return
        # NOTE(review): `file_path` is captured from the loop below at call
        # time — presumably the intended name argument for createStream;
        # confirm against createStream's signature.
        self._streams.append(createStream(p, file_path, self._update))
        self._streamed_folders.append(p)

    # Start an extra listener for every symlink under the tree.
    for file_path in fileList(self.path, folders=True):
        if os.path.islink(file_path):
            target = os.path.realpath(file_path)
            if os.path.isdir(file_path):
                add_stream(target)
            else:
                add_stream(os.path.dirname(target))
def __init__(self, directories, **kwargs):
    """Watch the module directory plus NON_MODULE_DIRS for changes to
    files whose extension is in WATCHED_EXTENSIONS."""
    self._changes = _changes = []
    # Path to current module
    module_dir = directories[0]
    # Paths to watch: the module dir plus explicit extras outside it.
    paths = [module_dir]
    paths.extend(NON_MODULE_DIRS)
    self.observer = Observer()

    def callback(event, mask=None):
        # os.path.splitext always returns a 2-tuple, so the original
        # try/except IndexError around it was dead code.
        ext = os.path.splitext(event.name)[1]
        # Add to changes if we're watching a file with this extension.
        if ext in WATCHED_EXTENSIONS:
            _changes.append(event.name)

    self.stream = Stream(callback, file_events=True, *paths)
def main():
    def _notify(message):
        # Desktop notification is best-effort only; never let it fail us.
        try:
            Notifier.notify(message, title='LittleHelp', subtitle=project_path)
        except:
            pass

    def sigterm_handler(_signo, _stack_frame):
        _notify('Unregistering fs watch')
        logging.info("Sigterm handler called")
        observer.unschedule(stream)
        observer.stop()
        observer.join()
        _notify('Unregistered fs watch')
        sys.exit(0)

    try:
        _notify('Registering watch')
        observer = Observer()
        stream = Stream(file_event_callback, project_path, file_events=True)
        observer.schedule(stream)
        observer.start()
        signal.signal(signal.SIGTERM, sigterm_handler)
        while True:
            sleep(0.1)
    except:
        logging.exception("Unhandled exception")
def watch(self):
    """Create the observer/stream pair and begin watching self.root."""
    self.observer = Observer()
    self.observer.start()
    # Schedule after starting; the observer picks the stream up on its thread.
    self.stream = Stream(self.file_event_callback, self.root, file_events=True)
    self.observer.schedule(self.stream)
class Dmon:
    """Directory monitor dispatching create/delete/update events to handlers,
    using pyinotify on Linux and MacFSEvents on Darwin."""

    def __init__(self, dir, handlers):
        self._handlers = handlers
        self._dir = dir

    def _call_handler(self, method, fname):
        # Fan the event out to every registered handler.
        for handler in self._handlers:
            getattr(handler, method)(fname)

    def _run_pyinotify(self):
        from lib import pyinotify

        class PyInotifyEventProcessor(pyinotify.ProcessEvent):
            def __init__(self, c_handler):
                self._call = c_handler

            def process_IN_CREATE(self, event):
                self._call('on_create', event.pathname)

            def process_IN_DELETE(self, event):
                self._call('on_delete', event.pathname)

            def process_IN_MODIFY(self, event):
                self._call('on_update', event.pathname)

        wm = pyinotify.WatchManager()
        self._observer = pyinotify.Notifier(wm, PyInotifyEventProcessor(self._call_handler))
        mask = pyinotify.ALL_EVENTS
        wm.add_watch(self._dir, mask, rec=True)
        # Manual event pump; poll once a second.
        while True:
            self._observer.process_events()
            if self._observer.check_events():
                self._observer.read_events()
            time.sleep(1)

    def _run_macfse(self):
        from lib import fsevents
        from fsevents import Stream
        from fsevents import Observer

        def macfse_callback(event):
            # Mask values appear to be empirically chosen for
            # create/delete/update — confirm against fsevents docs.
            if event.mask in [256, 128]:
                self._call_handler('on_create', event.name)
            elif event.mask in [512, 64]:
                self._call_handler('on_delete', event.name)
            elif event.mask == 2:
                self._call_handler('on_update', event.name)

        self._observer = Observer()
        self._stream = Stream(macfse_callback, self._dir, file_events=True)
        self._observer.schedule(self._stream)
        self._observer.start()

    def start(self):
        platform = sys.platform.lower()
        if 'darwin' in platform:
            self._run_macfse()
        elif 'linux' in platform:
            self._run_pyinotify()

    def stop(self):
        platform = sys.platform.lower()
        if 'darwin' in platform:
            self._observer.unschedule(self._stream)
            self._observer.stop()
        elif 'linux' in platform:
            self._observer.stop()
def run(self):
    """Start a file-event observer for self.path; self is the callback."""
    watcher = Observer()
    watcher.start()
    # self is passed as the event callback (it is callable / a handler).
    stream = Stream(self, self.path, file_events=True)
    watcher.schedule(stream)
def start_events(self):
    """Setup the observer."""
    self.get_tree(cached=False)
    if self.observer is None:
        self.observer = Observer()
    self.observer.schedule(Stream(self.fsevent_handler, self.real_root, ids=True))
    # Only start the thread if it is not already running.
    if not self.observer.is_alive():
        self.observer.start()
def test_start_no_watch(self):
    """With nothing scheduled, no events should be delivered."""
    events = []

    def callback(*args):
        events.append(args)

    from fsevents import Observer
    observer = Observer()
    f, path = self._make_temporary()
    observer.start()

    # Spin until the observer thread is actually alive.
    import time
    while not observer.isAlive():
        time.sleep(0.1)
    del events[:]
    f.close()
    time.sleep(0.2)

    # stop and join observer
    observer.stop()
    observer.join()
    self.assertEqual(events, [])
def main(): arg_parser = argparse.ArgumentParser() arg_parser.add_argument('path') arg_parser.add_argument('-c', '--cmd', required=True, nargs='+', help='Console command for execute') arg_parser.add_argument('-d', '--delay', type=int, default=5, nargs='?', help='Synchronization delay in seconds') arg_parser.add_argument('-v', '--verbose', action='store_true', help='verbose flag') try: args = arg_parser.parse_args() path = args.path if not os.path.isdir(path): raise IOError('Path `%s` is not a directory' % path) event_handler = EventHandler() timer = TimerInterval(callback_func, args.delay, event_handler=event_handler, cmd=args.cmd, verbose=args.verbose) stream = Stream(event_handler.callback_handler, path, file_events=True) observer = Observer() observer.schedule(stream) observer.start() timer.start() except IOError as e: print e except KeyboardInterrupt: observer.unschedule(stream) observer.stop() timer.stop()
def main(): arg_parser = argparse.ArgumentParser() arg_parser.add_argument('path') arg_parser.add_argument('-c', '--cmd', required=True, nargs='+', help='Console command for execute') arg_parser.add_argument('-d', '--delay', type=int, default=5, nargs='?', help='Synchronization delay in seconds') arg_parser.add_argument('-f', '--files', action='store_true', help="show changed files snapshot") arg_parser.add_argument('-v', '--verbose', action='store_true', help='increase verbosity') try: args = arg_parser.parse_args() path = args.path if not os.path.isdir(path): raise IOError('Path `%s` is not a directory' % path) event_handler = EventHandler() callback_params = { 'event_handler': event_handler, 'cmd': args.cmd, 'verbose': args.verbose, 'show_files': args.files } timer = TimerInterval(callback_func, args.delay, **callback_params) stream = Stream(event_handler.callback_handler, path, file_events=True) observer = Observer() observer.schedule(stream) observer.start() timer.start() except IOError as e: print e except KeyboardInterrupt: observer.unschedule(stream) observer.stop() timer.stop()
def loop(self):
    """Run the observer until interrupted, then always tear it down."""
    observer = Observer()
    observer.daemon = True
    observer.schedule(self.stream)
    observer.start()
    try:
        # observer.join() would not be interruptible by Ctrl-C,
        # so sleep in a loop instead.
        while True:
            time.sleep(10000)
    finally:
        observer.unschedule(self.stream)
        observer.stop()
def start_watching(self):
    """ Start watching our paths, and do the main work. """
    watch_path = self.settings['local']['watch_path']
    stream = Stream(self.capture_event, watch_path, file_events=True)
    observer = Observer()
    observer.schedule(stream)
    # run() blocks: the observer loop executes on this thread.
    observer.run()
def main(): usage = 'usage: %prog [options] CALL_DIRECTORY' parser = OptionParser(usage=usage) parser.add_option('-p', '--port', dest='port', help='port to listen on') parser.add_option('-a', '--address', dest='address', help='address to listen on') (options, args) = parser.parse_args() if len(args) != 1: parser.print_help() sys.exit(1) call_definition_path = args[0] if options.port: port = int(options.port) else: port = PORT if options.address: ip_address = options.address else: ip_address = IP_ADDRESS # Monitor the call definition path to restart the # server if any of the files change, or new ones # are added observer = Observer() observer.start() stream = Stream(definition_change, call_definition_path) observer.schedule(stream) global call_handler call_handler = CallHandler(call_definition_path) server_class = BaseHTTPServer.HTTPServer global httpd httpd = server_class((ip_address, port), HttpRequestHandler) print 'WebServiceSimulator started' try: httpd.serve_forever() except KeyboardInterrupt: pass finally: print 'Shutting down web service simulator' httpd.server_close() sys.exit(0)
def main():
    logging.basicConfig(level=logging.INFO)
    load_config()
    watcher = Observer()
    # daemon attribute is the modern equivalent of setDaemon(True).
    watcher.daemon = True
    watcher.start()
    watcher.schedule(Stream(callback, ".", file_events=True))
    try:
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        pass
    finally:
        watcher.stop()
        print("")
def start_scanner(path): """ watch for file events in the supplied path """ try: observer = Observer() observer.start() stream = Stream(file_modified, path, file_events=True) observer.schedule(stream) print "Watching for changes. Press Ctrl-C to stop." while 1: pass except (KeyboardInterrupt, OSError, IOError): observer.unschedule(stream) observer.stop()
def __init__(self, path, f, ignore=None):
    """Initialise an FSEvents-backed listener rooted at *path*."""
    logging.info("Using FSEvents")
    self.path = path
    self.f = f
    self.ignore = ignore
    self.observer = Observer()
    self.observer.daemon = True
    self._streams = [createStream(self.path, path, self._update)]
    self._streamed_folders = [self.path]

    def add_stream(p):
        if p not in self._streamed_folders:
            # NOTE(review): `file_path` comes from the enclosing loop at
            # call time; confirm it is the intended second argument.
            self._streams.append(createStream(p, file_path, self._update))
            self._streamed_folders.append(p)

    # Start an extra listener behind each symlink in the tree.
    for file_path in fileList(self.path, folders=True):
        if not os.path.islink(file_path):
            continue
        real = os.path.realpath(file_path)
        add_stream(real if os.path.isdir(file_path) else os.path.dirname(real))
class FSEventsScanner: """ Runs Ok, as long as you have MacFSEvents installed - but does not catch changes to the permissions nor uid/gid """ def __init__(self): self.paths = [] self.observer = Observer() self.stream = None def addPathToListenForChangesOn(self, thePath): print "added path:" + thePath self.paths.append(thePath) def startListening(self): self.observer.start() self.stream = Stream(callback, *self.paths, file_events=True) self.observer.schedule(self.stream)
def loop(self):
    """Run the observer, force-restarting whenever the wait queue signals.

    The observer is always unscheduled and stopped on exit.
    """
    observer = Observer()
    observer.schedule(self.stream)
    observer.daemon = True
    observer.start()
    try:
        while True:
            try:
                # A fully blocking get() would not be interrupted by
                # Ctrl-C, so poll with a very long timeout instead.
                # BUG FIX: get(10000) passed 10000 as the *block* flag,
                # not the timeout, so Empty could never be raised.
                if self.wait_queue.get(timeout=10000) is True:
                    self.force_restart()
            except Empty:
                pass
    finally:
        observer.unschedule(self.stream)
        observer.stop()
class MtimeFileWatcher(object):
    """Collects paths of changed files (with watched extensions) under the
    module directory and NON_MODULE_DIRS, via an fsevents observer."""

    SUPPORTS_MULTIPLE_DIRECTORIES = True

    def __init__(self, directories, **kwargs):
        self._changes = _changes = []
        # Path to current module
        module_dir = directories[0]
        # Paths to watch: the module dir plus explicit extras outside it.
        paths = [module_dir]
        paths.extend(NON_MODULE_DIRS)
        self.observer = Observer()

        def callback(event, mask=None):
            # os.path.splitext always returns a 2-tuple, so the original
            # try/except IndexError around it was dead code.
            ext = os.path.splitext(event.name)[1]
            # Add to changes if we're watching a file with this extension.
            if ext in WATCHED_EXTENSIONS:
                _changes.append(event.name)

        self.stream = Stream(callback, file_events=True, *paths)

    def start(self):
        self.observer.schedule(self.stream)
        self.observer.start()

    def changes(self, timeout=None):
        # Give in-flight events a moment to arrive, then drain the list.
        time.sleep(0.1)
        changed = set(self._changes)
        del self._changes[:]
        return changed

    def quit(self):
        self.observer.unschedule(self.stream)
        self.observer.stop()
        self.observer.join()
def _loop_darwin(self):  # pragma: no cover
    """loop implementation for darwin platform"""
    from fsevents import Observer  #pylint: disable=F0401
    from fsevents import Stream  #pylint: disable=F0401
    from fsevents import IN_MODIFY  #pylint: disable=F0401

    observer = Observer()
    handle = self._handle

    def fsevent_callback(event):
        # Only modification events are forwarded to the handler.
        if event.mask == IN_MODIFY:
            handle(event)

    for folder in self.watch_dirs:
        observer.schedule(Stream(fsevent_callback, folder, file_events=True))

    observer.daemon = True
    observer.start()
    try:
        # hack to keep the main thread alive while the observer works
        import time
        while True:
            time.sleep(99999)
    except (SystemExit, KeyboardInterrupt):
        pass
def __init__(self, triggers, queue_execution_wait=0.2):
    """Set up trigger bookkeeping and the delayed firing-queue timer."""
    self.triggers = triggers
    self.observer = Observer()
    self.last_execution_stats = None
    # Events accumulate here and are executed in a batch after firing_wait.
    self.firing_queue = []
    self.firing_wait = queue_execution_wait
    self.firing_queue_thread = Timer(self.firing_wait, self.execute_firing_queue)
    self.is_executing_firing_queue = False
def watch_local_files(): print "Watching for changes to: {}".format(", ".join(TEMPLATES.keys())) observer = Observer() stream = Stream(event_callback, os.getcwd(), file_events=True) observer.schedule(stream) try: observer.start() while True: time.sleep(86400) except KeyboardInterrupt: observer.stop()
def start_session():
    """Watch the current directory, dispatching events to the created callback.

    Blocks on observer.run() until the observer is stopped.
    """
    observer = Observer()
    cb = create_cb()
    stream = Stream(cb, os.getcwd(), file_events=True)
    observer.schedule(stream)
    # BUG FIX: the original called both start() and run() on the same
    # Observer (a Thread subclass), executing its loop twice concurrently.
    # run() alone runs the loop on this thread and blocks, which is the
    # intended "keep the session alive" behaviour.
    observer.run()
def watch_files(paths):
    """Schedule update_file for every path and start a daemonized observer."""
    global observer
    observer = Observer()
    observer.schedule(Stream(update_file, file_events=True, *paths))
    log.info("Starting observer")
    # Daemonize so the observer dies with the main thread.
    observer.daemon = True
    observer.start()
    log.info("Observer started")
def __init__(self, kPath=None):
    """ init the monitor """
    self.observer = Observer()
    self.path = kPath
    self.eventLog = {}
    # Semaphore guards the event deque below.
    self.sem = threading.Semaphore()
    self.eventq = deque()
    self.qChangeEvent = threading.Event()
    self.notifier = Notifier(self.qChangeEvent, self)
def main(): observer = Observer() observer.start() path = '/Users/seungjin/Desktop' def callback(event): #print "mask: " + str(event.mask) #print "cookie: " + str(event.cookie) #print "name: " + str(event.name) print event if event.mask == 256: #looks like new file comming newFile(str(event.name)) elif event.mask == 512: #looks line file deleted rmFile(str(event.name)) elif event.mask == 2: #looks like overwriting? print "hihihihi" def newFile(filename): print "new file is comming" #pushing this file into cloud gs = Googlestorage() #print gs.list_objects() gs.upload_objects(filename) def rmFile(filename): #print "%s is removed" % filename gs = Googlestorage( ) # this is evil.. do i need to make global goolgestorage object??? idk gs.delete_objects_and_buckets(filename) stream = Stream(callback, path, file_events=True) observer.schedule(stream)
def _run_macfse(self):
    """Start a MacFSEvents observer translating event masks to handler calls."""
    from lib import fsevents
    from fsevents import Stream
    from fsevents import Observer

    # Masks grouped by the handler they should invoke.
    create_masks = [256, 128]
    delete_masks = [512, 64]

    def macfse_callback(event):
        if event.mask in create_masks:
            self._call_handler('on_create', event.name)
        elif event.mask in delete_masks:
            self._call_handler('on_delete', event.name)
        elif event.mask == 2:
            self._call_handler('on_update', event.name)

    self._observer = Observer()
    self._stream = Stream(macfse_callback, self._dir, file_events=True)
    self._observer.schedule(self._stream)
    self._observer.start()
def main():
    """Watch the home directory and delete every .DS_Store that appears."""
    path = os.path.expanduser('~')
    enemy_name = '.DS_Store'
    observer = Observer()

    def callback(path, mask):
        # Best-effort delete.  FIX: only swallow filesystem errors
        # (file vanished, permissions), not arbitrary exceptions as the
        # original bare `except:` did.
        try:
            full_path = os.path.join(path, enemy_name)
            os.remove(full_path)
        except OSError:
            pass

    stream = Stream(callback, path)
    observer.schedule(stream)
    observer.start()
    # Sweep any .DS_Store files that already exist before events flow.
    subprocess.call(['find', path, '-name', '\\' + enemy_name, '-delete'])
    observer.join()
class Monitor: def __init__(self): self.obs = Observer() self.stream = Stream(self.callback, SOURCE_PATH, file_events=True) def callback(self, event): #file event callback function if event.name.endswith(VALID_SUFFIX) and os.path.isfile(event.name): sc.upload_track(event.name) def start(self): print "Starting monitor..." self.obs.start() self.obs.schedule(self.stream) def stop(self): print "Stopping monitor..." self.obs.unschedule(self.stream) self.obs.stop()
def toggle_activate(self):
    # Toggle the watcher on/off based on the `active` checkbox state,
    # enabling/disabling the related buttons and persisting the new state.
    become_active = self.active.get()
    if self.activate_id is not None and become_active:
        # while failing to auto-activate allow user to deactivate
        # without changing stored preferences
        debug('cancelling timed activation')
        self.after_cancel(self.activate_id)
        self.activate_id = None
        self.wfbut['state'] = tk.NORMAL
        self.sfbut['state'] = tk.NORMAL
        self.purgebut['state'] = tk.NORMAL
        self.active.set(0)
        return
    debug('toggle activate', become_active)
    self.tick_app_icon()
    if become_active:
        ## turn on
        self.wfbut['state'] = tk.DISABLED
        self.sfbut['state'] = tk.DISABLED
        self.purgebut['state'] = tk.DISABLED
        self.queue_status('Looking for directories')
        self.update_idletasks()
        wfs = self.wfs_dir.get()
        if not self.dirs_okay():
            # Directories not ready yet: schedule a retry via reactivate().
            if self.activate_id is None:
                self.activate_id = self.after(400, self.reactivate)
            return
        self.update_idletasks()
        self.do_sync()
        if self.observer is None:
            # Watch the workflow directory and re-sync on every change.
            self.stream = Stream(self.do_sync, wfs)
            self.observer = Observer()
            self.observer.schedule(self.stream)
            self.observer.start()
    else:
        self.action.set(self.pad('Not active'))
        self.update_idletasks()
        ## turn off
        if self.observer:
            self.observer.unschedule(self.stream)
            self.observer.stop()
            self.observer = None
        # NOTE(review): nesting of the statements below was reconstructed
        # from a whitespace-collapsed source — confirm against the original.
        self.wfbut['state'] = tk.NORMAL
        self.sfbut['state'] = tk.NORMAL
        self.purgebut['state'] = tk.NORMAL
    self.config['state']['active'] = str(become_active)
    self.write_config()
class Monitor: def __init__( self ): self.obs = Observer() self.stream = Stream ( self.callback, SOURCE_PATH, file_events=True ) def callback( self, event ): #file event callback function if event.name.endswith( VALID_SUFFIX ) and os.path.isfile( event.name ): sc.upload_track ( event.name ) def start( self ): print "Starting monitor..." self.obs.start() self.obs.schedule( self.stream ) def stop( self ): print "Stopping monitor..." self.obs.unschedule( self.stream ) self.obs.stop()
def main(): if len(sys.argv) != 3: print "Usage: %s FilePath SetID" % sys.argv[0] sys.exit(1) path = sys.argv[1] if not os.path.exists(path): print "Path does not exist" sys.exit(1) global driver driver = setupDriver(sys.argv[2]) observer = Observer() stream = Stream(callback, os.path.abspath(path), file_events=True) observer.schedule(stream) observer.start() print "Starting observation..." print "Ctrl-C to exit." try: # yields to allow Ctrl-C to be captured while True: # sleep saves CPU cycles time.sleep(1) except KeyboardInterrupt: print "\nQuitting program gracefully" observer.unschedule(stream) observer.stop() try: driver.quit() except URLError: # Some URLError pass sys.exit(1)
class FileSystemNotifier(object):
    """Watches target_dirs and forwards change notifications to a callback."""

    def __init__(self, target_dirs, callback):
        self.target_dirs = target_dirs
        self.callback = callback

    def start(self):
        """Create the stream/observer pair and start the observer thread."""
        self.observer = Observer()
        self.stream = Stream(self.directory_changed, *self.target_dirs)
        self.observer.schedule(self.stream)
        # Daemonize so the observer dies with the main thread.
        self.observer.daemon = True
        self.observer.start()

    def directory_changed(self, subpath, mask):
        logging.debug('Directory changed: %s, %s' % (subpath, mask))
        self.callback(subpath, mask)

    def shutdown(self):
        self.observer.unschedule(self.stream)
        self.observer.stop()
def get_notified(path_regexps, project_path):
    """Get notified when files change, and minify them. """
    observer = Observer()
    observer.start()

    def file_changed(file_change_event):
        """Callback for when a file has changed"""
        changed = file_change_event.name
        if is_minifiable(changed) and needs_minifying(changed):
            tell_user_and_minify(changed)

    observer.schedule(Stream(file_changed, project_path, file_events=True))
def main():
    """Watch a directory and refresh matching browser tabs on changes."""
    parser = argparse.ArgumentParser(description="Refresh browser tabs when local files change.")
    parser.add_argument("path", help="The directory to watch for changes.")
    parser.add_argument("keyword", help="Tabs with this keyword in their URL will be refreshed.")
    parser.add_argument("-b", "--browser", help="Which browser to refresh.",
                        choices=sorted(browser_reloaders.keys()), default="chrome")
    args = parser.parse_args()
    print("Watching {} for changes...".format(os.path.abspath(args.path)))
    event_callback_partial = partial(event_callback, browsers=(args.browser,), keyword=args.keyword)
    observer = Observer()
    # BUG FIX: watch the parsed args.path, not sys.argv[1], which is wrong
    # whenever an option (e.g. -b chrome) precedes the positional argument.
    stream = Stream(event_callback_partial, args.path, file_events=True)
    observer.schedule(stream)
    try:
        observer.start()
        while True:
            time.sleep(86400)
    except KeyboardInterrupt:
        observer.stop()
def main(): signal.signal(signal.SIGINT, signal_handler) parser = argparse.ArgumentParser(description='Sync files to a remote server of scp.') parser.add_argument('path', type=str, metavar=('local', 'remote'), nargs=2, help='path for the monitor') args = parser.parse_args() print args.path observer = Observer() stream = Stream(lambda x, y: callback(x,y, args.path[1]), args.path[0]) observer.schedule(stream) try: observer.start() while True: # instead of this infinite loop, you can do pass # whatever processing you wanted except KeyboardInterrupt: observer.stop()
def main(): signal.signal(signal.SIGINT, signal_handler) parser = argparse.ArgumentParser( description='Sync files to a remote server of scp.') parser.add_argument('path', type=str, metavar=('local', 'remote'), nargs=2, help='path for the monitor') args = parser.parse_args() print args.path observer = Observer() stream = Stream(lambda x, y: callback(x, y, args.path[1]), args.path[0]) observer.schedule(stream) try: observer.start() while True: # instead of this infinite loop, you can do pass # whatever processing you wanted except KeyboardInterrupt: observer.stop()
def _loop_darwin(self):  # pragma: no cover
    """loop implementation for darwin platform"""
    from fsevents import Observer  #pylint: disable=F0401
    from fsevents import Stream  #pylint: disable=F0401
    from fsevents import IN_MODIFY  #pylint: disable=F0401

    observer = Observer()
    handler = self._handle

    def fsevent_callback(event):
        # Forward only modification events.
        if event.mask == IN_MODIFY:
            handler(event)

    streams = [Stream(fsevent_callback, d, file_events=True)
               for d in self.watch_dirs]
    for s in streams:
        observer.schedule(s)
    observer.daemon = True
    # run() executes the observer loop on the current thread (blocking).
    observer.run()
]) print "compile gss %s to %s " % (infile, outfile) if (path.endswith(".touch_index")): update_index_html() if (update_index): global_update_index = True os.utime("%s.touch_index" % root, None) def callback(e): name = e.name cookie = e.cookie mask = e.mask processor(name) for subdir, dir, files in os.walk(root): for file in files: path = "%s/%s" % (subdir, file) processor(path) update_index_html() try: observer = Observer() stream = Stream(callback, root, file_events=True) observer.schedule(stream) observer.run() except: observer.stop()
class FSEventsListener(object):
    """FSEvents-based listener reporting added/changed/deleted paths to the
    callback *f*, with extra streams behind symlinked folders."""

    def __init__(self, path, f, ignore=None):
        logging.info("Using FSEvents")
        self.path = path
        self.f = f
        self.ignore = ignore
        self.observer = Observer()
        self.observer.daemon = True
        self._streams = []
        self._streams.append(createStream(self.path, path, self._update))
        self._streamed_folders = [self.path]

        def add_stream(p):
            if p in self._streamed_folders:
                return
            # NOTE(review): `file_path` is captured from the loop below at
            # call time — confirm it is the intended second argument.
            self._streams.append(createStream(p, file_path, self._update))
            self._streamed_folders.append(p)

        # Start an extra listener for all symlinks
        for file_path in fileList(self.path, folders=True):
            if os.path.islink(file_path):
                if os.path.isdir(file_path):
                    add_stream(os.path.realpath(file_path))
                else:
                    add_stream(os.path.dirname(os.path.realpath(file_path)))

    def run(self):
        self.resume()
        self.observer.start()

    def pause(self):
        logging.debug("MacListener.PAUSE")
        for stream in self._streams:
            self.observer.unschedule(stream)

    def resume(self):
        logging.debug("MacListener.RESUME")
        for stream in self._streams:
            self.observer.schedule(stream)

    def stop(self):
        # BUG FIX: the original `def stop():` was missing `self`, so any
        # instance.stop() call raised TypeError.
        self.observer.stop()

    def _update(self, event):
        path = event.name
        if self.ignore and self.ignore(path):
            return
        logging.debug("MacListener.update %s", event)
        result = {
            'added': [],
            'deleted': [],
            'changed': [],
        }
        if os.path.exists(path):
            # Paths created within the last second count as "added".
            seconds_since_created = int(time.time()) - get_creation_time(
                os.path.realpath(path))
            if seconds_since_created < 1.0:
                result["added"].append(path)
            else:
                result["changed"].append(path)
        else:
            result["deleted"].append(path)
        self.f(result)
def _setup_watchers(self, folders):
    """Setup watchers for all folders."""
    # A single shared observer is created here; per-folder streams are
    # wired up elsewhere.
    watchers = {'observer': Observer()}
    return watchers
# Load layered configuration: packaged defaults first, then the --config
# file, then the per-user ~/.z_sync.yaml — each layer overriding the last.
with open(
        os.path.dirname(os.path.realpath(__file__)) + "/config.yaml.default",
        "r") as configfile:
    config = yaml.load(configfile, Loader=yaml.FullLoader)
try:
    with open(args.config, "r") as configfile:
        config.update(yaml.load(configfile, Loader=yaml.FullLoader))
except FileNotFoundError:
    # Optional override file; absence is fine.
    pass
try:
    with open(os.path.expanduser("~/.z_sync.yaml"), "r") as configfile:
        config.update(yaml.load(configfile, Loader=yaml.FullLoader))
except FileNotFoundError:
    pass

observer = Observer()
syncer = Syncer(config, args.enable_notifications, args.interval)

# The interactive shell runs on a daemon thread so it dies with the process.
shell = Shell()
shell_event = threading.Event()
shell.set(shell_event, observer, syncer)
shell_thread = threading.Thread(target=shell.cmdloop)
shell_thread.setDaemon(True)
shell_thread.start()

# NOTE(review): this block may continue beyond the visible source.
if args.init:
    if args.from_server or args.from_local:
        direction = "from Local"
        if args.from_server:
            direction = "from Server"
        logging.info("--------- Init Sync %s -----------", direction)
def child():
    """Start observing the current directory for file events."""
    watcher = Observer()
    watcher.schedule(Stream(callback, ".", file_events=True))
    watcher.start()
sys.stderr.write("interrupted\n") [c.interrupt() for c in Context.s] # simple struct, not a new-style-class class Meta: def __init__(self): self.state = CREATED self.size = 0 self.hash = 0 self.id = uniqueId++ items = dict() #name -> meta { lastKnownState, size, hash, id} lookup = dict() #id -> items uniqueId = 1 def callback(fe): sys.stderr.write("%d %s\n" % (fe.mask, fe.name)) #check if fe.name is in items if __name__ == "__main__": # signal.signal(signal.SIGINT, handler) observer = Observer() stream = Stream(callback, os.getcwd() + "/scan", file_events=True) c = Context(observer, stream) c.start() raw_input("Press any key to exit\n") c.stop() c.wait() sys.stderr.write("all done\n")