class ObserverWrapper:
    """Run a watchdog Observer for one monitored directory inside a child
    process, reacting to commands ('start' / 'stop' / 'terminate') that the
    parent places in the shared ``command_dict`` keyed by directory.

    NOTE: Observer objects can't cross process boundaries because they are
    unpicklable, so the Observer is created inside ``run()`` (the child).
    """

    def __init__(self, key, monitoring_directory, storage_directory, share_ID,
                 user_name, machine_ID, command_port, json_response_dict,
                 min_update_interval, command_dict):
        self.key = key
        self.monitoring_directory = monitoring_directory
        self.storage_directory = storage_directory
        self.share_ID = share_ID
        self.user_name = user_name
        self.machine_ID = machine_ID
        self.command_port = command_port
        self.json_response_dict = json_response_dict
        self.min_update_interval = min_update_interval
        self.command_dict = command_dict

    def _make_observer(self):
        """Create, schedule and start a fresh Observer for the watched dir."""
        observer = Observer()
        observer.schedule(self.a_s, self.monitoring_directory, recursive=True)
        observer.start()
        return observer

    def run(self):
        """Child-process main loop: watch the directory and obey commands."""
        self.a_s = AutoSync(self.key, self.monitoring_directory,
                            self.storage_directory, self.share_ID,
                            self.user_name, self.machine_ID,
                            self.command_port, self.json_response_dict,
                            self.min_update_interval)
        self.observer = self._make_observer()
        while True:
            if self.monitoring_directory not in self.command_dict:
                time.sleep(0.1)
                continue
            command = self.command_dict[self.monitoring_directory]
            # NOTE: isAlive() was removed in Python 3.9; use is_alive().
            if command == 'stop' and self.observer.is_alive():
                logging.debug('stopping observer')
                self.observer.stop()
            elif command == 'start' and not self.observer.is_alive():
                logging.debug('starting observer')
                # A stopped thread cannot be restarted (RuntimeError), so a
                # fresh Observer must be created and scheduled instead of
                # calling start() on the old one again.
                self.observer = self._make_observer()
            elif command == 'terminate':
                logging.debug('terminating observer')
                if self.observer.is_alive():
                    self.observer.stop()
                self.observer.unschedule_all()
                return

    def start(self):
        """Spawn the child process running :meth:`run`."""
        proc = Process(target=self.run)
        proc.start()
        logging.info('observer started')
class relay_main:
    '''
    The relay main thread class for the file event handling.
    NOTE :: THIS THREAD SHOULDNT HOLD ANY LOCK OR ENTER INTO CRITICAL SECTION
    BY BLOCKING OTHER THREADS. ITS POSSIBLE THIS THREAD GET KILLED BY MAIN
    THREAD ANYTIME. HOLDING A CRITICAL SECTION RESOURCE IN THIS MODULE LEADS
    A DEAD LOCK.
    '''

    def __init__(self):
        self.nv_log_handler = nv_logger(self.__class__.__name__).get_logger()
        self.os_context = nv_os_lib()
        self.watcher_obj = relay_watcher()
        self.observer_obj = Observer()

    def process_relay(self):
        """Start watching the camera stream directory.

        Raises:
            FileNotFoundError: if the stream directory does not exist.
        """
        # The original wrapped this in try/except clauses that only
        # re-raised; letting exceptions propagate naturally is equivalent.
        if not self.os_context.is_path_exists(NV_MID_BOX_CAM_STREAM_DIR):
            self.nv_log_handler.error("%s Directory not found",
                                      NV_MID_BOX_CAM_STREAM_DIR)
            raise FileNotFoundError
        self.observer_obj.schedule(self.watcher_obj,
                                   NV_MID_BOX_CAM_STREAM_DIR,
                                   recursive=True)
        self.observer_obj.start()

    def relay_stop(self):
        """Signal the watcher thread and the observer to stop."""
        self.watcher_obj.kill_relay_thread()
        self.observer_obj.stop()

    def relay_join(self):
        """Join the observer thread if it is still running."""
        # isAlive() was removed in Python 3.9; is_alive() is the correct API.
        if self.observer_obj.is_alive():
            self.observer_obj.join()
def wait_on_entry_calc(self, key):
    """Block until the cache entry for *key* is done being calculated
    (by another process), then return its value.

    If the entry is not marked as being calculated, its current value is
    returned immediately.
    """
    with self.lock:
        self._reload_cache()
        entry = self._get_cache()[key]
        if not entry['being_calculated']:
            return entry['value']
    # Poll for the cache file change in a loop rather than recursing on
    # timeout: the original recursive retry could exhaust the stack on a
    # long-running calculation.
    while True:
        event_handler = _PickleCore.CacheChangeHandler(
            filename=self._cache_fname(),
            core=self,
            key=key
        )
        observer = Observer()
        event_handler.inject_observer(observer)
        observer.schedule(
            event_handler,
            path=EXPANDED_CACHIER_DIR,
            recursive=True
        )
        observer.start()
        observer.join(timeout=1.0)
        # isAlive() was removed in Python 3.9; use is_alive().
        if not observer.is_alive():
            # Observer finished => handler saw the change and stopped it.
            return event_handler.value
        # Timed out waiting; try again with a fresh observer.
def main():
    """Watch a folder for events and kick off the GeoWebCache seeding thread."""
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        filename='./log/console.log')
    logging.debug("start process")
    event_handler = CompleteEventHandler()
    logging.info("Watching Folder : " + event_handler.WATCH_FOLDER)
    observer = Observer()
    observer.schedule(event_handler,
                      path=os.path.abspath(event_handler.WATCH_FOLDER),
                      recursive=True)
    observer.start()

    # Daemon thread that requests cache seeding from GeoServer.
    t1 = threading.Thread(
        target=getCache,
        args=(event_handler.HTTP + event_handler.GEOSERVER_URL +
              '/geoserver/gwc/rest/seed/' +
              event_handler.GEOSERVER_STORE_NAME + ':' +
              event_handler.GEOSERVER_LAYER_NAME + '.json',
              event_handler.GEOSERVER_USER,
              event_handler.GEOSERVER_PASSWORD,
              event_handler.GEOWEBCACHE_BOX_LEVEL))
    t1.daemon = True
    t1.start()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    # isAlive() was removed in Python 3.9; also drop the redundant second
    # stop() the original issued right after the one in the except clause.
    if observer.is_alive():
        observer.stop()
    observer.join()
class PropMTimeWatcher:
    """Watch all paths from the preferences file and re-schedule on demand."""

    def __init__(self, app_data_folder):
        self._app_data_folder = app_data_folder
        self._observer = Observer()
        self.schedule()

    def schedule(self):
        """(Re)schedule a watcher for every enabled path in preferences."""
        pref = PropMTimePreferences(self._app_data_folder)
        self._observer.unschedule_all()
        for path, watcher in pref.get_all_paths().items():
            if watcher:
                if os.path.exists(path):
                    event_handler = ModHandler(path, self._app_data_folder)
                    log.info('scheduling watcher : %s' % path)
                    self._observer.schedule(event_handler, path=path, recursive=True)
                else:
                    log.error('Error: "%s" does not exist.\n\nPlease edit the path.\n\nTo do this, click on the %s icon and select "Paths".' % (path, __application_name__))
        # A thread may only be started once; guard so that calling
        # schedule() again to re-scan paths does not raise RuntimeError.
        if not self._observer.is_alive():
            self._observer.start()

    def request_exit(self):
        """Stop watching and join the observer thread (bounded by TIMEOUT)."""
        self._observer.unschedule_all()
        self._observer.stop()
        self._observer.join(TIMEOUT)
        # isAlive() was removed in Python 3.9; use is_alive().
        if self._observer.is_alive():
            log.error('observer still alive')
async def read_ouput_txt(observer: Observer):
    """Wait until the observer thread stops, then return the contents of
    Factorio's ``script-output/output.txt``.

    Polls every 100 ms without blocking the event loop.
    """
    # Double the backslashes: '\s' / '\o' are invalid escape sequences
    # (DeprecationWarning); '\\' yields the identical runtime string.
    path = config.factorio_user_data + '\\script-output\\output.txt'
    path = os.path.expandvars(path)
    # isAlive() was removed in Python 3.9; 'is True' was redundant.
    while observer.is_alive():
        await asyncio.sleep(0.1)
    with open(path) as fp:
        return fp.read()
def sneakernet_import_thread():
    """Add block data dir & confed paths to fs observer to watch for new bls"""
    observer = Observer()
    for path in watch_paths:
        observer.schedule(_Importer(), path, recursive=True)
    observer.start()
    # isAlive() was removed in Python 3.9; use is_alive().
    while observer.is_alive():
        # join with a timeout so the loop re-checks liveness each minute
        observer.join(60)
class ObserverWrapper:
    """Run a watchdog Observer for one monitored directory inside a child
    process, reacting to commands ('start' / 'stop' / 'terminate') that the
    parent places in the shared ``command_dict`` keyed by directory.

    NOTE: Observer objects can't cross process boundaries because they are
    unpicklable, so the Observer is created inside ``run()`` (the child).
    """

    def __init__(self, key, monitoring_directory, storage_directory, share_ID,
                 user_name, machine_ID, command_port, json_response_dict,
                 min_update_interval, command_dict):
        self.key = key
        self.monitoring_directory = monitoring_directory
        self.storage_directory = storage_directory
        self.share_ID = share_ID
        self.user_name = user_name
        self.machine_ID = machine_ID
        self.command_port = command_port
        self.json_response_dict = json_response_dict
        self.min_update_interval = min_update_interval
        self.command_dict = command_dict

    def _make_observer(self):
        """Create, schedule and start a fresh Observer for the watched dir."""
        observer = Observer()
        observer.schedule(self.a_s, self.monitoring_directory, recursive=True)
        observer.start()
        return observer

    def run(self):
        """Child-process main loop: watch the directory and obey commands."""
        self.a_s = AutoSync(self.key, self.monitoring_directory,
                            self.storage_directory, self.share_ID,
                            self.user_name, self.machine_ID,
                            self.command_port, self.json_response_dict,
                            self.min_update_interval)
        self.observer = self._make_observer()
        while True:
            if self.monitoring_directory not in self.command_dict:
                time.sleep(0.1)
                continue
            command = self.command_dict[self.monitoring_directory]
            # NOTE: isAlive() was removed in Python 3.9; use is_alive().
            if command == 'stop' and self.observer.is_alive():
                logging.debug('stopping observer')
                self.observer.stop()
            elif command == 'start' and not self.observer.is_alive():
                logging.debug('starting observer')
                # A stopped thread cannot be restarted (RuntimeError), so a
                # fresh Observer must be created and scheduled instead of
                # calling start() on the old one again.
                self.observer = self._make_observer()
            elif command == 'terminate':
                logging.debug('terminating observer')
                if self.observer.is_alive():
                    self.observer.stop()
                self.observer.unschedule_all()
                return

    def start(self):
        """Spawn the child process running :meth:`run`."""
        proc = Process(target=self.run)
        proc.start()
        logging.info('observer started')
def run(self):
    """Watch ``self.path`` recursively until interrupted or flagged to stop."""
    print('run')
    observer = Observer()
    observer.schedule(self, self.path, recursive=True)
    observer.start()
    try:
        # isAlive() was removed in Python 3.9; use is_alive().
        while observer.is_alive() and not self.__stop:
            observer.join(self.interval)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def watch_mount(detect_callback):
    """Watch /Volumes for mount events, invoking *detect_callback* via
    the WatchMount handler, until interrupted with Ctrl-C."""
    observer = Observer()
    path = "/Volumes"
    observer.schedule(WatchMount(observer, detect_callback), path=path)
    observer.start()
    try:
        # isAlive() was removed in Python 3.9; use is_alive().
        while observer.is_alive():
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def watch(ctx):
    """Watch the directory for changes. Automatically run tests.
    """
    vcs = ctx.obj['vcs']
    event_handler = TestsEventHandler(vcs)
    observer = Observer()
    observer.schedule(event_handler, vcs.path, recursive=True)
    observer.start()
    click.echo('Watching directory `{path}`. Use ctrl-c to stop.'.format(path=vcs.path))
    # isAlive() was removed in Python 3.9; use is_alive().
    while observer.is_alive():
        observer.join(timeout=1)
def watch(on_change, path='.'):
    """Watch *path* recursively, polling the handler every 0.25 s and
    invoking *on_change* via ``handler.check`` until Ctrl-C.

    Fix: the original immediately overwrote the *path* parameter with '.',
    silently ignoring the caller's argument.
    """
    handler = TestEventHandler()
    observer = Observer()
    observer.schedule(handler, path, recursive=True)
    observer.start()
    try:
        # isAlive() was removed in Python 3.9; use is_alive().
        while observer.is_alive():
            handler.check(on_change)
            observer.join(0.25)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def handle(self, *args, **options):
    """Django management-command entry point: watch the arXiv preprint
    metadata directory until interrupted."""
    path = f'/{options["root"]}/preprints/arxiv/metadata'
    observer = Observer()
    event_handler = MyEventHandler(observer)
    observer.schedule(event_handler, path, recursive=True)
    self.stdout.write(self.style.WARNING(f'Watching {path} ...'))
    observer.start()
    try:
        # isAlive() was removed in Python 3.9; use is_alive().
        while observer.is_alive():
            observer.join(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def watch(ctx):
    """Watch the directory for changes. Automatically run tests.
    """
    vcs = ctx.obj['vcs']
    event_handler = TestsEventHandler(vcs)
    observer = Observer()
    observer.schedule(event_handler, vcs.path, recursive=True)
    observer.start()
    click.echo('Watching directory `{path}`. Use ctrl-c to stop.'.format(
        path=vcs.path))
    # isAlive() was removed in Python 3.9; use is_alive().
    while observer.is_alive():
        observer.join(timeout=1)
def run_input(stopEvent, timeout, debug=False):
    """Watch the VERA input directory until *stopEvent* is set.

    Returns True after the observer is cleanly stopped and joined.
    """
    path = os.path.join(Commons.getEnv(Config.vera), Setup.path_inp)
    event_handler = vera_handler.InputHandler(debug)
    observer = Observer()
    observer.schedule(event_handler, path, recursive=False)
    observer.start()
    log.info('AI::VERA - running <%s> Watching path: [%s]', 'run_img', path)
    try:
        # wait() returns True once the event is set; loop until then.
        while not stopEvent.wait(timeout):
            pass
    except Exception:
        # Narrowed from a bare 'except:' which also swallowed SystemExit
        # and KeyboardInterrupt; cleanup below still runs either way.
        observer.stop()
    log.info('AI::VERA - stopping %s', 'run_img')
    # isAlive() was removed in Python 3.9; use is_alive().
    if observer.is_alive():
        observer.stop()
    observer.join()
    return True
def main():
    """Watch a folder for events and kick off the GeoWebCache seeding thread."""
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        filename='./log/console.log')
    logging.debug("start process")
    event_handler = CompleteEventHandler()
    logging.info("Watching Folder : " + event_handler.WATCH_FOLDER)
    observer = Observer()
    observer.schedule(event_handler,
                      path=os.path.abspath(event_handler.WATCH_FOLDER),
                      recursive=True)
    observer.start()

    # Daemon thread that requests cache seeding from GeoServer.
    t1 = threading.Thread(
        target=getCache,
        args=(event_handler.HTTP + event_handler.GEOSERVER_URL +
              '/geoserver/gwc/rest/seed/' +
              event_handler.GEOSERVER_STORE_NAME + ':' +
              event_handler.GEOSERVER_LAYER_NAME + '.json',
              event_handler.GEOSERVER_USER,
              event_handler.GEOSERVER_PASSWORD,
              event_handler.GEOWEBCACHE_BOX_LEVEL))
    t1.daemon = True
    t1.start()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    # isAlive() was removed in Python 3.9; also drop the redundant second
    # stop() the original issued right after the one in the except clause.
    if observer.is_alive():
        observer.stop()
    observer.join()
def watch():
    """Implementation of -w/--watch option."""
    print("Starting watchdog")
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
    event_handler = UploadingEventHandler()
    observer = Observer()
    observer.schedule(event_handler, args.path, recursive=True)
    observer.start()
    print("Awaiting new changes")
    try:
        # isAlive() was removed in Python 3.9; use is_alive().
        # NOTE(review): join() with no timeout blocks until the observer
        # stops, so the loop body effectively runs once — confirm intent.
        while observer.is_alive():
            print("loop started")
            observer.join()
    except KeyboardInterrupt:
        print("Stopping")
        observer.stop()
        observer.join()
default=None) opts, args = parser.parse_args() # general use case - automatic file detection if not opts.manual: queue = multiprocessing.Queue() # create the file observer that will watch for new files observer = Observer() observer.schedule(NewImageHandler(queue), path=path_to_watch, recursive=True) observer.start() if observer.isAlive(): print('\n\n***** OBSERVER IS LIVE *****\n\n') else: print('[observer] the observer failed to start!') exit(1) # check if there are input images without results print('[observer] attempting to recover state...') num_recovered = recover_state(queue, path_to_watch, results_dir) print( '[observer] found %d file(s) that are missing results to add to the queue' % (num_recovered)) print('[observer] observer is now monitoring %s for new files' % (path_to_watch)) task_list = []
class AiidaLabApp(traitlets.HasTraits):
    """Manage installation status of an AiiDA lab app.

    Arguments:

        name (str):
            Name of the Aiida lab app.
        app_data (dict):
            Dictionary containing the app metadata.
        aiidalab_apps_path (str):
            Path to directory at which the app is expected to be installed.
    """

    path = traitlets.Unicode(allow_none=True, readonly=True)
    install_info = traitlets.Unicode()

    available_release_lines = traitlets.Set(traitlets.Unicode)
    installed_release_line = traitlets.Unicode(allow_none=True)
    installed_version = traitlets.Unicode(allow_none=True)
    updates_available = traitlets.Bool(readonly=True, allow_none=True)

    busy = traitlets.Bool(readonly=True)
    modified = traitlets.Bool(readonly=True, allow_none=True)

    class AppPathFileSystemEventHandler(FileSystemEventHandler):
        """Internal event handler for app path file system events."""

        def __init__(self, app):
            self.app = app

        def on_any_event(self, event):
            """Refresh app for any event."""
            self.app.refresh_async()

    def __init__(self, name, app_data, aiidalab_apps_path):
        super().__init__()

        if app_data is not None:
            self._git_url = app_data['git_url']
            self._meta_url = app_data['meta_url']
            self._git_remote_refs = app_data['gitinfo']
            self.categories = app_data['categories']
            self._meta_info = app_data['metainfo']
        else:
            self._git_url = None
            self._meta_url = None
            self._git_remote_refs = {}
            self.categories = None
            self._meta_info = None

        self._observer = None
        self._check_install_status_changed_thread = None

        self.name = name
        self.path = os.path.join(aiidalab_apps_path, self.name)
        self.refresh_async()
        self._watch_repository()

    @traitlets.default('modified')
    def _default_modified(self):
        if self.is_installed():
            return self._repo.dirty()
        return None

    @traitlets.default('busy')
    def _default_busy(self):  # pylint: disable=no-self-use
        return False

    @contextmanager
    def _show_busy(self):
        """Apply this decorator to indicate that the app is busy during execution."""
        self.set_trait('busy', True)
        try:
            yield
        finally:
            self.set_trait('busy', False)

    def _watch_repository(self):
        """Watch the app repository for file system events.

        The app state is refreshed automatically for all events.
        """
        if self._observer is None and os.path.isdir(self.path):
            event_handler = self.AppPathFileSystemEventHandler(self)

            self._observer = Observer()
            self._observer.schedule(event_handler, self.path, recursive=True)
            self._observer.start()

        if self._check_install_status_changed_thread is None:

            def check_install_status_changed():
                installed = self.is_installed()
                while not self._check_install_status_changed_thread.stop_flag:
                    if installed != self.is_installed():
                        installed = self.is_installed()
                        self.refresh()
                    sleep(1)

            self._check_install_status_changed_thread = Thread(
                target=check_install_status_changed)
            self._check_install_status_changed_thread.stop_flag = False
            self._check_install_status_changed_thread.start()

    def _stop_watch_repository(self, timeout=None):
        """Stop watching the app repository for file system events."""
        if self._observer is not None:
            self._observer.stop()
            self._observer.join(timeout=timeout)
            # isAlive() was removed in Python 3.9; use is_alive().
            if not self._observer.is_alive():
                self._observer = None

        if self._check_install_status_changed_thread is not None:
            self._check_install_status_changed_thread.stop_flag = True
            self._check_install_status_changed_thread.join(timeout=timeout)
            if not self._check_install_status_changed_thread.is_alive():
                self._check_install_status_changed_thread = None

    def __del__(self):  # pylint: disable=missing-docstring
        self._stop_watch_repository(1)

    def in_category(self, category):
        # One should test what happens if the category won't be defined.
        return category in self.categories

    def is_installed(self):
        """The app is installed if the corresponding folder is present."""
        return os.path.isdir(self.path)

    def _has_git_repo(self):
        """Check if the app has a .git folder in it."""
        try:
            Repo(self.path)
            return True
        except NotGitRepository:
            return False

    def install_app(self, version=None):
        """Installing the app."""
        assert self._git_url is not None
        if version is None:
            version = 'git:refs/heads/' + AIIDALAB_DEFAULT_GIT_BRANCH

        with self._show_busy():
            assert version.startswith('git:refs/heads/')
            branch = re.sub(r'git:refs\/heads\/', '', version)

            if not os.path.isdir(self.path):  # clone first
                check_output([
                    'git', 'clone', '--branch', branch, self._git_url,
                    self.path
                ],
                             cwd=os.path.dirname(self.path),
                             stderr=STDOUT)

            check_output(['git', 'checkout', '-f', branch],
                         cwd=self.path,
                         stderr=STDOUT)
            self.refresh()
            self._watch_repository()
            return branch

    def update_app(self, _=None):
        """Perform app update."""
        assert self._git_url is not None
        with self._show_busy():
            fetch(repo=self._repo, remote_location=self._git_url)
            tracked_branch = self._repo.get_tracked_branch()
            check_output(['git', 'reset', '--hard', tracked_branch],
                         cwd=self.path,
                         stderr=STDOUT)
            self.refresh_async()

    def uninstall_app(self, _=None):
        """Perform app uninstall."""
        # Perform uninstall process.
        with self._show_busy():
            self._stop_watch_repository()
            try:
                shutil.rmtree(self.path)
            except FileNotFoundError:
                raise RuntimeError("App was already uninstalled!")
            self.refresh()
            self._watch_repository()

    def check_for_updates(self):
        """Check whether there is an update available for the installed release line."""
        try:
            assert self._git_url is not None
            branch_ref = 'refs/heads/' + self._repo.branch().decode()
            assert self._repo.get_tracked_branch() is not None
            remote_update_available = self._git_remote_refs.get(
                branch_ref) != self._repo.head().decode()
            self.set_trait(
                'updates_available',
                remote_update_available or self._repo.update_available())
        except (AssertionError, RuntimeError):
            self.set_trait('updates_available', None)

    def _available_release_lines(self):
        """Return all available release lines (local and remote)."""
        for branch in self._repo.list_branches():
            yield 'git:refs/heads/' + branch.decode()
        for ref in self._git_remote_refs:
            if ref.startswith('refs/heads/'):
                yield 'git:' + ref

    @throttled(calls_per_second=1)
    def refresh(self):
        """Refresh app state."""
        with self._show_busy():
            with self.hold_trait_notifications():
                if self.is_installed() and self._has_git_repo():
                    self.available_release_lines = set(
                        self._available_release_lines())
                    try:
                        self.installed_release_line = 'git:refs/heads/' + self._repo.branch(
                        ).decode()
                    except RuntimeError:
                        self.installed_release_line = None
                    self.installed_version = self._repo.head()
                    self.check_for_updates()
                    self.set_trait('modified', self._repo.dirty())
                else:
                    self.available_release_lines = set()
                    self.installed_release_line = None
                    self.installed_version = None
                    self.set_trait('updates_available', None)
                    self.set_trait('modified', None)

    def refresh_async(self):
        """Asynchronized (non-blocking) refresh of the app state."""
        refresh_thread = Thread(target=self.refresh)
        refresh_thread.start()

    @property
    def metadata(self):
        """Return metadata dictionary.

        Give the priority to the local copy (better for the developers)."""
        if self.is_installed():
            try:
                with open(os.path.join(self.path,
                                       'metadata.json')) as json_file:
                    return json.load(json_file)
            except IOError:
                return dict()
        elif self._meta_info is not None:
            return dict(self._meta_info)
        elif self._meta_url is None:
            raise RuntimeError(
                f"Requested app '{self.name}' is not installed and is also not registered on the app registry."
            )
        else:
            return requests.get(self._meta_url).json()

    def _get_from_metadata(self, what):
        """Get information from metadata."""
        try:
            return "{}".format(self.metadata[what])
        except KeyError:
            if not os.path.isfile(os.path.join(self.path, 'metadata.json')):
                return '({}) metadata.json file is not present'.format(what)
            return 'the field "{}" is not present in metadata.json file'.format(
                what)

    @property
    def authors(self):
        return self._get_from_metadata('authors')

    @property
    def description(self):
        return self._get_from_metadata('description')

    @property
    def title(self):
        return self._get_from_metadata('title')

    @property
    def url(self):
        """Provide explicit link to Git repository."""
        return self._git_url

    @property
    def more(self):
        return """<a href=./single_app.ipynb?app={}>Manage App</a>""".format(
            self.name)

    @property
    def logo(self):
        """Return logo object. Give the priority to the local version"""
        # For some reason standard ipw.Image() app does not work properly.
        res = ipw.HTML('<img src="./aiidalab_logo_v4.svg">',
                       layout={
                           'width': '100px',
                           'height': '100px'
                       })
        # Checking whether the 'logo' key is present in metadata dictionary.
        if 'logo' not in self.metadata:
            res.value = '<img src="./aiidalab_logo_v4.svg">'
        # If 'logo' key is present and the app is installed.
        elif self.is_installed():
            res.value = '<img src="{}">'.format(
                os.path.join('..', self.name, self.metadata['logo']))
        # If not installed, getting file from the remote git repository.
        else:
            # Remove .git if present.
            html_link = os.path.splitext(
                self._git_url)[0]  # We expect it to always be a git repository
            html_link += '/master/' + self.metadata['logo']
            if 'github.com' in html_link:
                html_link = html_link.replace('github.com',
                                              'raw.githubusercontent.com')
            if html_link.endswith('.svg'):
                html_link += '?sanitize=true'
            res.value = '<img src="{}">'.format(html_link)
        return res

    @property
    def _repo(self):
        """Returns Git repository."""
        if not self.is_installed():
            raise AppNotInstalledException("The app is not installed")
        return Repo(self.path)

    def render_app_manager_widget(self):
        """Display widget to manage the app."""
        try:
            return AppManagerWidget(self, with_version_selector=True)
        except Exception as error:  # pylint: disable=broad-except
            return ipw.HTML(
                '<div style="font-size: 30px; text-align:center;">'
                f'Unable to show app widget due to error: {error}'
                '</div>',
                layout={'width': '600px'})
class MainWindow(QMainWindow):
    """Main application window: drives the CSV->JSON converter (watchdog
    Observer) and a log-tailing worker thread."""

    def __init__(self, parent=None):
        Logger().debug("Creazione oggetto MainWindow")
        super(MainWindow, self).__init__(parent)
        self.ui = UiMainWindow.Ui_MainWindow()
        self.obs = None
        self.tailer = None
        self.tailerThread = None
        self.setupUi()
        self.setupSignalsAndSlots()
        self.startTailerListener()

    def closeEvent(self, event):
        """Stop the observer and the tailer thread before closing."""
        Logger().debug("Chiusura applicazione")
        if isinstance(self.obs, Observer):
            # isAlive() was removed in Python 3.9; use is_alive().
            if self.obs.is_alive():
                Logger().info("Chiusura Observer")
                self.obs.stop()
                self.obs.join()
                Logger().info("Observer thread terminato")
        res = QMetaObject.invokeMethod(self.tailer, "stopProcess",
                                       Qt.BlockingQueuedConnection)
        QCoreApplication.processEvents()
        self.tailer.thread().wait()
        Logger().debug("Tailer listener thread terminato")
        super(MainWindow, self).closeEvent(event)

    def startTailerListener(self):
        """Create the log-tailer worker in its own QThread and wire it up."""
        Logger().debug(
            "Creazione listener Tailer per visualizzazione file di log")
        self.tailerThread = QThread()
        self.tailer = Tailer()
        self.tailer.moveToThread(self.tailerThread)
        self.tailerThread.started.connect(self.tailer.startProcess)
        self.tailer.newLineSignal.connect(self.addLine)
        self.tailer.stopSignal.connect(self.tailerThread.quit)
        self.tailer.startSignal.connect(
            lambda: self.ui.cbLogger.setChecked(True))
        self.tailer.stopSignal.connect(
            lambda: self.ui.cbLogger.setChecked(False))
        Logger().debug("Avvio listener thread")
        self.tailerThread.start()

    def setupUi(self):
        """Initialize widgets, window title and stylesheet."""
        self.ui.setupUi(self)
        self.setWindowTitle(Constants.PROGRAM_NAME + " - version " +
                            Constants.CSV2JSON_VERSION)
        self.ui.cbLogger.setEnabled(False)
        self.ui.cbConverter.setEnabled(False)
        self.ui.leSourcePath.setText(Settings().getSpoolPath())
        self.ui.pbStyle.setVisible(Settings().isStyleButtonVisible())
        self.ui.pteEditor.setMaximumBlockCount(Settings().getMaxLogRowCount())
        self.ui.pteEditor.setReadOnly(True)
        file = QFile(":/qss/style")
        file.open(QFile.ReadOnly | QFile.Text)
        ts = QTextStream(file)
        styleSheet = ts.readAll()
        self.setStyleSheet(styleSheet)

    def setupSignalsAndSlots(self):
        """Connect UI buttons to their slots."""
        Logger().debug("Aggancio segnali signals/slots")
        self.ui.pbStyle.clicked.connect(self.reloadStyle)
        self.ui.pbStart.clicked.connect(self.start)
        self.ui.pbStop.clicked.connect(self.stop)

    def reloadStyle(self):
        """Reload the stylesheet from disk (developer helper)."""
        Logger().debug("Caricamento foglio di stile")
        with open(Settings().getStyleSheetPath()) as fd:
            self.setStyleSheet(fd.read())

    def start(self):
        """Start watching the source path for CSV files."""
        Logger().info("Avvio listener dei file CSV")
        if isinstance(self.obs, Observer):
            # isAlive() was removed in Python 3.9; use is_alive().
            if self.obs.is_alive():
                Logger().warning("Observer gia' in esecuzione")
                return
        self.obs = Observer()
        path = self.ui.leSourcePath.text()
        Logger().info("Path in ascolto: " + path)
        eventHandler = CustomEventHandler.CustomEventHandler(path)
        self.obs.schedule(eventHandler, path, recursive=True)
        self.obs.start()
        self.ui.leSourcePath.setReadOnly(True)
        self.ui.cbConverter.setChecked(True)
        Logger().info("Listener dei file CSV avviato")

    def stop(self):
        """Stop the CSV watcher, if one is running."""
        Logger().info("Chiusura listener dei file CSV")
        if not isinstance(self.obs, Observer):
            Logger().info("Nessun Observer avviato")
            return
        # isAlive() was removed in Python 3.9; use is_alive().
        if self.obs.is_alive():
            self.obs.stop()
            self.obs.join()
        self.obs = None
        self.ui.leSourcePath.setReadOnly(False)
        self.ui.cbConverter.setChecked(False)
        Logger().info("Listener dei file CSV terminato")

    @Slot(str)
    def addLine(self, line):
        """Append one tailed log line to the editor widget."""
        self.ui.pteEditor.appendPlainText(line)
logging.debug("Watchdog received created event - % s." % event.src_path) self.compress_image(event.src_path) def compress_image(self, src_path): cmd = '/usr/local/bin/pngquant 128 --skip-if-larger --strip --ext=.png --force' cmd = cmd.split(" ") cmd.append(src_path) subprocess.run(cmd, stdout=subprocess.DEVNULL) cmd = '/usr/local/bin/zopflipng -y' cmd = cmd.split(" ") cmd.append(src_path) cmd.append(src_path[:-4] + '_compressed.png') #subprocess.run(cmd, stdout=subprocess.DEVNULL) if __name__ == "__main__": logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S') path = '/users/ajaggi/Screenshots' event_handler = MyHandler() observer = Observer() observer.schedule(event_handler, path, recursive=False) observer.start() try: while observer.isAlive(): observer.join(1) except KeyboardInterrupt: observer.stop() observer.join()
def watch(build_commands=None, patterns=None, ignore_patterns=None,
          exclude_list=None):
    """TODO(keir) docstring"""
    _LOG.info('Starting Pigweed build watcher')

    # Get pigweed directory information from environment variable PW_ROOT.
    # Use .get(): the original indexed os.environ directly, which raises
    # KeyError when PW_ROOT is unset instead of ever being None.
    if os.environ.get('PW_ROOT') is None:
        _exit_due_to_pigweed_not_installed()
    path_of_pigweed = pathlib.Path(os.environ['PW_ROOT'])
    cur_dir = pathlib.Path(os.getcwd())
    if (not (is_subdirectory(path_of_pigweed, cur_dir)
             or path_of_pigweed == cur_dir)):
        _exit_due_to_pigweed_not_installed()

    # Preset exclude list for pigweed directory.
    exclude_list = get_exclude_list(exclude_list)

    subdirectories_to_watch \
        = minimal_watch_directories(cur_dir, exclude_list)

    # If no build directory was specified, search the tree for GN build
    # directories and try to build them all. In the future this may cause
    # slow startup, but for now this is fast enough.
    build_commands_tmp = build_commands
    build_commands = []
    if not build_commands_tmp:
        _LOG.info('Searching for GN build dirs...')
        gn_args_files = []
        if os.path.isfile('out/args.gn'):
            gn_args_files += ['out/args.gn']
        gn_args_files += glob.glob('out/*/args.gn')

        for gn_args_file in gn_args_files:
            gn_build_dir = pathlib.Path(gn_args_file).parent
            gn_build_dir = gn_build_dir.resolve().relative_to(cur_dir)
            if gn_build_dir.is_dir():
                build_commands.append(BuildCommand(gn_build_dir))
    else:
        # Reformat the directory of build commands to be relative to the
        # currently directory.
        for build_target in build_commands_tmp:
            build_target_dir = build_target.build_dir
            build_commands.append(
                BuildCommand(build_target_dir, build_target.targets))

    # Make sure we found something; if not, bail.
    if not build_commands:
        _die("No build dirs found. Did you forget to 'gn gen out'?")

    # Verify that the build output directories exist.
    for i, build_target in enumerate(build_commands, 1):
        if not build_target.build_dir.is_dir():
            _die("Build directory doesn't exist: %s", build_target)
        else:
            _LOG.info('Will build [%d/%d]: %s', i, len(build_commands),
                      build_target)

    _LOG.debug('Patterns: %s', patterns)

    path_of_directory_to_watch = '.'

    # Try to make a short display path for the watched directory that has
    # "$HOME" instead of the full home directory. This is nice for users
    # who have deeply nested home directories.
    path_to_log = pathlib.Path(path_of_directory_to_watch).resolve()
    try:
        path_to_log = path_to_log.relative_to(pathlib.Path.home())
        path_to_log = f'$HOME/{path_to_log}'
    except ValueError:
        # The directory is somewhere other than inside the users home.
        path_to_log = path_of_directory_to_watch

    # Ignore the user-specified patterns.
    ignore_patterns = (ignore_patterns.split(_WATCH_PATTERN_DELIMITER)
                       if ignore_patterns else [])

    ignore_dirs = ['.presubmit', '.python3-env']

    env = pw_cli.env.pigweed_environment()
    if env.PW_EMOJI:
        charset = _EMOJI_CHARSET
    else:
        charset = _ASCII_CHARSET

    event_handler = PigweedBuildWatcher(
        patterns=patterns.split(_WATCH_PATTERN_DELIMITER),
        ignore_patterns=ignore_patterns,
        build_commands=build_commands,
        ignore_dirs=ignore_dirs,
        charset=charset,
    )

    try:
        # It can take awhile to configure the filesystem watcher, so have the
        # message reflect that with the "...". Run inside the try: to
        # gracefully handle the user Ctrl-C'ing out during startup.
        _LOG.info('Attaching filesystem watcher to %s/...', path_to_log)

        # Observe changes for all files in the root directory. Whether the
        # directory should be observed recursively or not is determined by the
        # second element in subdirectories_to_watch.
        observers = []
        for directory, rec in subdirectories_to_watch:
            observer = Observer()
            observer.schedule(
                event_handler,
                str(directory),
                recursive=rec,
            )
            observer.start()
            observers.append(observer)

        event_handler.debouncer.press('Triggering initial build...')

        for observer in observers:
            # isAlive() was removed in Python 3.9; use is_alive().
            while observer.is_alive():
                observer.join(1)

    # Ctrl-C on Unix generates KeyboardInterrupt
    # Ctrl-Z on Windows generates EOFError
    except (KeyboardInterrupt, EOFError):
        _exit_due_to_interrupt()
    except OSError as err:
        if err.args[0] == _ERRNO_INOTIFY_LIMIT_REACHED:
            _exit_due_to_inotify_limit()
        else:
            raise err

    _LOG.critical('Should never get here')
    observer.join()
def watch(build_commands=None, patterns=None, ignore_patterns=None):
    """TODO(keir) docstring"""
    _LOG.info('Starting Pigweed build watcher')

    # If no build directory was specified, search the tree for GN build
    # directories and try to build them all. In the future this may cause
    # slow startup, but for now this is fast enough.
    if not build_commands:
        build_commands = []
        _LOG.info('Searching for GN build dirs...')
        gn_args_files = glob.glob('**/args.gn', recursive=True)
        for gn_args_file in gn_args_files:
            gn_build_dir = pathlib.Path(gn_args_file).parent
            if gn_build_dir.is_dir():
                build_commands.append(BuildCommand(gn_build_dir))

    # Make sure we found something; if not, bail.
    if not build_commands:
        _die("No build dirs found. Did you forget to 'gn gen out'?")

    # Verify that the build output directories exist.
    for i, build_target in enumerate(build_commands, 1):
        if not build_target.build_dir.is_dir():
            _die("Build directory doesn't exist: %s", build_target)
        else:
            _LOG.info('Will build [%d/%d]: %s', i, len(build_commands),
                      build_target)

    _LOG.debug('Patterns: %s', patterns)

    # TODO(keir): Change the watcher to selectively watch some
    # subdirectories, rather than watching everything under a single path.
    #
    # The problem with the current approach is that Ninja's building
    # triggers many events, which are needlessly sent to this script.
    path_of_directory_to_watch = '.'

    # Try to make a short display path for the watched directory that has
    # "$HOME" instead of the full home directory. This is nice for users
    # who have deeply nested home directories.
    path_to_log = pathlib.Path(path_of_directory_to_watch).resolve()
    try:
        path_to_log = path_to_log.relative_to(pathlib.Path.home())
        path_to_log = f'$HOME/{path_to_log}'
    except ValueError:
        # The directory is somewhere other than inside the users home.
        path_to_log = path_of_directory_to_watch

    # Ignore the user-specified patterns.
    ignore_patterns = (ignore_patterns.split(_WATCH_PATTERN_DELIMITER)
                       if ignore_patterns else [])

    ignore_dirs = ['.presubmit', '.python3-env']

    env = pw_cli.env.pigweed_environment()
    if env.PW_EMOJI:
        charset = _EMOJI_CHARSET
    else:
        charset = _ASCII_CHARSET

    event_handler = PigweedBuildWatcher(
        patterns=patterns.split(_WATCH_PATTERN_DELIMITER),
        ignore_patterns=ignore_patterns,
        build_commands=build_commands,
        ignore_dirs=ignore_dirs,
        charset=charset,
    )

    try:
        # It can take awhile to configure the filesystem watcher, so have the
        # message reflect that with the "...". Run inside the try: to
        # gracefully handle the user Ctrl-C'ing out during startup.
        _LOG.info('Attaching filesystem watcher to %s/...', path_to_log)
        observer = Observer()
        observer.schedule(
            event_handler,
            path_of_directory_to_watch,
            recursive=True,
        )
        observer.start()

        event_handler.debouncer.press('Triggering initial build...')

        # isAlive() was removed in Python 3.9; use is_alive().
        while observer.is_alive():
            observer.join(1)

    # Ctrl-C on Unix generates KeyboardInterrupt
    # Ctrl-Z on Windows generates EOFError
    except (KeyboardInterrupt, EOFError):
        _exit_due_to_interrupt()

    _LOG.critical('Should never get here')
    observer.join()
print " ------- ON CREATED" print event.src_path self.process(event) # with open('/home/gugli/Documents/script_py/Dainik_Jagron/checkDownStatus.txt','w') as outFile: # outFile.write("CREATED") # self.process(event) if __name__ == '__main__': args = sys.argv[1:] observer = Observer() DIRECTORY_TO_WATCH = "/home/gugli/Documents/script_py/Dainik_Jagron" #observer.schedule(MyHandler(), path=args[0] if args else '.') """ You can set the named-argument "recursive" to True for observer.schedule. if you want to watch for files in subfolders.""" observer.schedule(MyHandler(), path=DIRECTORY_TO_WATCH) print " ----------------- " observer.start() try: while True: print "==========", observer.isAlive() time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join()