def on_moved(self, event):
    """Mirror a local move/rename on the remote host.

    Logs the event through the wrapped LoggingEventHandler, replays the
    move remotely with ``mv``, then asks nginx to validate its config.

    :param event: watchdog move event carrying ``src_path`` and ``dest_path``.
    """
    # NOTE(review): the wrapped handler instance is passed explicitly as
    # ``self`` — presumably self.log_event_handler is a LoggingEventHandler;
    # confirm against the class __init__.
    LoggingEventHandler.on_moved(self.log_event_handler, event)
    src_path = event.src_path
    print(src_path)
    des_path = event.dest_path
    print(des_path)
    # Quote both paths so names containing spaces or shell metacharacters
    # neither break nor inject into the remote command line (the original
    # concatenated them raw).
    import shlex
    exec_command_connect_key('mv %s %s' % (shlex.quote(src_path), shlex.quote(des_path)))
    # Re-check the nginx configuration after the move.
    exec_command_connect_key('/usr/local/nginx18/sbin/nginx -t')
def go_watch():
    """Watch PATH_TO_WATCH and keep local and remote sides in sync.

    Wires the sync_* callbacks onto a watchdog LoggingEventHandler, starts
    an Observer, then loops forever: every AUTO_SYNC_TIME seconds it runs a
    download sync plus a deleted-directory check (the download only when the
    observer's event queue is idle). Ctrl-C stops the observer cleanly.

    Converted from Python 2 syntax so the module is consistent with the
    rest of the file, which already uses Python 3 ``print()``/f-strings.
    """
    try:
        print('Start watching %s' % PATH_TO_WATCH)
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s - %(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S')
        event_handler = LoggingEventHandler()
        # Attach the sync callbacks BEFORE starting the observer so no early
        # event is handled by the default (log-only) methods. The original
        # assigned them after observer.start(), leaving a small race window.
        event_handler.on_modified = sync_upload
        event_handler.on_deleted = sync_upload_delete
        event_handler.on_created = sync_upload_create
        event_handler.on_moved = sync_upload_move
        observer = Observer()
        observer.schedule(event_handler, PATH_TO_WATCH, recursive=True)
        observer.start()
        time_loop = 1
        try:
            while True:
                time.sleep(1)
                time_loop += 1
                if not time_loop % AUTO_SYNC_TIME:
                    print('Auto sync every %s second' % AUTO_SYNC_TIME)
                    # Only download-sync while watchdog's queue is drained,
                    # so a download never races a still-running upload batch.
                    if not observer.event_queue.unfinished_tasks:
                        sync_download()
                        check_dir_deleted()
                    print('Auto check downloaded file or folder')
                    check_dir_deleted()
        except KeyboardInterrupt:
            print('End watching.')
            observer.stop()
            observer.join()
    except Exception as e:
        # Deliberate top-level best-effort guard: report and return
        # instead of crashing the caller.
        print('*' * 10)
        print(e)
        print('*' * 10)
    return
def create_observer(self):
    """Build, remember on ``self``, and return a watchdog Observer
    wired to this instance's sync_* callbacks for config.path_to_watch."""
    logger.info('Start watching %s' % config.path_to_watch)
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    handler = LoggingEventHandler()
    watcher = Observer()
    self.observer = watcher
    # Route every event type to its corresponding sync_* method.
    hooks = (
        ('on_modified', self.sync_upload),
        ('on_deleted', self.sync_delete),
        ('on_created', self.sync_create),
        ('on_moved', self.sync_move),
        ('on_any_event', self.sync_any_event),
    )
    for name, callback in hooks:
        setattr(handler, name, callback)
    watcher.schedule(handler, config.path_to_watch, recursive=True)
    return watcher
def create_observer(self):
    '''Create a watchdog observer for config.path_to_watch, store it on
    self.observer, and return it with all sync callbacks attached.'''
    logger.info('Start watching %s' % config.path_to_watch)
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    handler = LoggingEventHandler()
    # Override the default log-only hooks with this instance's sync methods.
    handler.on_modified = self.sync_upload
    handler.on_deleted = self.sync_delete
    handler.on_created = self.sync_create
    handler.on_moved = self.sync_move
    handler.on_any_event = self.sync_any_event
    self.observer = obs = Observer()
    obs.schedule(handler, config.path_to_watch, recursive=True)
    return obs
# From here on the modified file is read and echoed.
def on_modified(event):
    """Print a notice and then every line of the file that was modified.

    :param event: watchdog event with the changed file's ``src_path``.
    """
    print(f"hey buddy, {event.src_path} has been modified")
    # ``with`` guarantees the handle is closed even if reading raises
    # (the original opened the file and never closed it).
    with open(event.src_path, "r") as f:
        fl = f.readlines()
        for yx in fl:
            print(yx)


def on_moved(event):
    """Report a move/rename event with its source and destination paths."""
    print(f"ok ok ok, someone moved {event.src_path} to {event.dest_path}")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    # Start from the default logging handler and override its hooks.
    event_handler = LoggingEventHandler()
    event_handler.on_created = on_created
    event_handler.on_deleted = on_deleted
    event_handler.on_modified = on_modified
    event_handler.on_moved = on_moved
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def on_moved(self, event):
    """Handle a move/rename event: log it, then rebuild the logger config.

    :param event: watchdog move event (``src_path`` -> ``dest_path``).
    """
    # Explicit unbound call preserves LoggingEventHandler's standard
    # logging of the move before this class reacts to it.
    LoggingEventHandler.on_moved(self, event)
    # NOTE(review): presumably the move affects where/what the logger
    # writes, hence the reconfiguration — confirm against _reconfig_logger.
    self._reconfig_logger(event)
help='Scan for existing files upon startup') parser.add_argument('--debug', action='store_true', help='Show debugging output') args = parser.parse_args() logging.basicConfig(level=getattr(logging, args.loglevel.upper(), None)) # Create Watchdog event handler. handler = LoggingEventHandler() handler.on_created = on_created handler.on_deleted = on_deleted handler.on_modified = on_modified handler.on_moved = on_moved # Create Watchdog observer. observer = Observer() observer.schedule(handler, args.src, recursive=True) try: if rclone(['about', args.dst]).returncode == 0: logging.info('Connected to backend "%s"' % args.dst) else: sys.exit() # Process any existing files. if args.scan_existing: scan_existing(args.src)