def test_should_add_piece_to_remote_collection(self):
    """Pieces created on disk should be pushed to the remote collection."""
    random_remote_collection_title = uuid.uuid4().hex
    crawler = PieceCrawler(
        path=self.FIXTURE_FILES_PATH,
        user=self.authentified_user(),
        remote_collection_title=random_remote_collection_title)
    piece_1_path = "/etc/movie/jumanji-1995.mkv"
    piece_2_path = "/etc/movie/Dikkenek (2006).avi"
    crawler.on_created(FileSystemEvent(piece_1_path))
    crawler.on_created(FileSystemEvent(piece_2_path))
    # BUG FIX: unittest has no assertCountGreater method (only
    # assertCountEqual / assertGreater); compare the list length explicitly.
    self.assertGreater(len(crawler.remote_collection.piece_list()), 1)
def test_should_add_new_piece_to_local_and_remote_collection(self, mock_add, *mocks):
    """New files land in the local collection and trigger one remote add each."""
    user = User(email="*****@*****.**", password="******", username="******")
    crawler = PieceCrawler(path="some/path", user=user)
    movie_paths = [
        "/etc/movie/jumanji-1995.mkv",
        "/etc/movie/Dikkenek (2006).avi",
    ]
    for movie_path in movie_paths:
        crawler.on_created(FileSystemEvent(movie_path))
    expected_pieces = [
        Piece(path=movie_path, guess=guessit.guessit(movie_path))
        for movie_path in movie_paths
    ]
    self.assertEqual(expected_pieces, crawler.local_collection().piece_list())
    self.assertEqual(2, mock_add.call_count)
def on_modified(self, event):
    """Debounced modification handler: flush at most once every 5 seconds.

    If no flush has ever happened, or the last one was >= 5 seconds ago,
    send the files immediately. Otherwise (re)schedule a single deferred
    re-check so a burst of rapid edits results in only one update.
    """
    global lastUpdated
    global watchHandler
    global lastUpdateCheckTimer

    if lastUpdated is None:
        # First event ever seen: flush immediately.
        print("Updating")
        lastUpdated = datetime.datetime.now()
        updateUISendFile()
        return

    if (datetime.datetime.now() - lastUpdated).total_seconds() >= 5:
        print("Last update > 5 seconds ago, sending files.")
        lastUpdated = datetime.datetime.now()
        updateUISendFile()
        return

    # Wait 5 seconds after the last update so that we do not open a
    # connection for each time the user types and updates.
    print("Last update < 5 seconds, will check back...")
    if lastUpdateCheckTimer is not None:
        lastUpdateCheckTimer.cancel()
    monitorDir = getSQLiteDirectory()
    # Idiom fix: `not os.path.exists(...)` instead of `... is False`.
    if not os.path.exists(monitorDir):
        monitorDir = getSNTDirectory()
    # Schedule a synthetic re-check event in 5 seconds.
    lastUpdateCheckTimer = threading.Timer(
        5, lambda: watchHandler.on_modified(FileSystemEvent(monitorDir)))
    lastUpdateCheckTimer.start()
def on_moved(self, event):
    """Handle a file rename/move and keep tracked file names in sync."""
    old_name = os.path.basename(event.src_path)
    new_name = os.path.basename(event.dest_path)
    old_matches = re.match(self.pattern, old_name) is not None
    new_matches = re.match(self.pattern, new_name) is not None

    # A move into / out of the watched pattern is really a create / delete.
    if new_matches and not old_matches:
        return self.on_created(FileSystemEvent(event.dest_path))
    if old_matches and not new_matches:
        return self.on_deleted(FileSystemEvent(event.src_path))

    self.logger.info("File renamed: {} -> {}".format(old_name, new_name))
    dump_needed = False
    for _sha, record in self.data.items():
        if record["file_name"] == old_name:
            record["file_name"] = new_name
            dump_needed = bool(record["annotation"])
    if dump_needed:
        self._dump_annotation()
    self._log_data()
    self.namespace.on_ECG_GET_LIST({}, {})
def initial_processing(self, watch_directories, event_handler):
    """Walk the watch directories and replay a CREATED event for every file
    that is not already queued — or that is queued but previously FAILED is
    skipped... actually: only files whose previous attempt FAILED are skipped.

    :param watch_directories: iterable of directory paths to scan.
    :param event_handler: handler whose on_any_event receives synthetic events.
    """
    for watch_directory in watch_directories:
        for root, dir_names, file_names in os.walk(watch_directory):
            for filename in file_names:
                file_path = os.path.join(root, filename)
                # BUG FIX: the original called .decode('utf-8')
                # unconditionally, which raises AttributeError on Python 3
                # where os.walk yields str. Only decode actual bytes.
                if isinstance(file_path, bytes):
                    file_path = file_path.decode('utf-8')
                # Simplified: the `file_path in self.mfq` re-check after
                # `or` was redundant. Skip only tracked-and-FAILED files.
                if (file_path not in self.mfq
                        or self.mfq[file_path].status != MediaFileState.FAILED):
                    file_event = FileSystemEvent(file_path)
                    file_event.is_directory = False
                    file_event.event_type = EVENT_TYPE_CREATED
                    event_handler.on_any_event(file_event)
def _on_moved(self, event):
    """Used for programs, such as gedit, that modify documents by moving
    (overwriting) the previous document with the temporary file. Only the
    moved event contains the name of the destination file."""
    try:
        # Treat the destination of the move as a plain modification.
        self._on_modified(FileSystemEvent(event.dest_path))
    except KeyboardInterrupt:
        for observer in self.observers:
            observer.stop()
    except Exception as err:
        restart("Error on moved: " + str(err) + "\nRestarting watch.")
def test_directory(database):
    """A created directory is recorded as a ReceivedDir linked to its study."""
    session = database()
    study = Study()
    study.name = 'STUDY'
    session.add(study)
    session.commit()

    dir_path = '/test/STUDY/dir'
    dir_event = FileSystemEvent(dir_path)
    dir_event.is_directory = True
    FsEventHandler(database, '/test').on_created(dir_event)

    stored = session.query(ReceivedDir).filter_by(dirname=dir_path).one()
    assert stored.dirname == dir_path
    assert stored.study.name == 'STUDY'
def test_file(database):
    """A created file is recorded and linked to its directory and study."""
    session = database()
    study = Study()
    study.name = 'STUDY'
    parent_dir = ReceivedDir()
    parent_dir.dirname = '/test/STUDY/dir'
    parent_dir.study = study
    session.add(study)
    session.add(parent_dir)
    session.commit()

    file_path = '/test/STUDY/dir/file'
    FsEventHandler(database, '/test').on_created(FileSystemEvent(file_path))

    stored = session.query(ReceivedFile).filter_by(filename=file_path).one()
    assert stored.filename == file_path
    assert stored.directory.dirname == '/test/STUDY/dir'
    assert stored.study.name == 'STUDY'
def ssh_worker(self):
    """Tail the remote log over SSH and replay file events locally.

    Polls the SSH client once per second while ``self.running`` is true,
    converts matching log lines into FileSystemEvent / FileSystemMovedEvent
    instances (paths arrive base64-encoded, relative to local_root) and
    feeds them to ``self.cmd_create``.
    """
    opts = {
        'port': int(self.profile['port_menu']),
        'user': self.profile['user'],
        'host': self.profile['address'],
        'private_key': self.profile['ssh_key_path'],
    }
    ln_filter = Tail.get_log_filter('groups', min_date=None)
    c = _ssh.Client(**opts)
    c.connect()
    c.list()
    while self.running:  # idiom fix: was `== True`
        time.sleep(1)
        for line in c.new_lines():
            m = ln_filter(line)
            if not m:
                continue
            event = None
            src_path = None
            if len(m) >= 4:
                # m[3] carries the base64-encoded source path.
                src_path = base64.standard_b64decode(m[3])
                src_path = os.path.join(self.profile['local_root'], src_path)
                event = FileSystemEvent(m[2], src_path, True)
            if len(m) >= 5 and m[4] is not None:
                # BUG FIX: the destination path is carried in m[4] (the
                # field the guard tests); the original decoded m[3] again,
                # which made the destination identical to the source.
                dest_path = base64.standard_b64decode(m[4])
                dest_path = os.path.join(self.profile['local_root'], dest_path)
                event = FileSystemMovedEvent(src_path, dest_path, True)
            if event is not None:
                self.cmd_create(event)
    self.log.info('ssh_worker shutting down')
    if c.running:
        c.close()
def __init__(self):
    """Load keyword config, watch the data directory, ingest any JSON files
    already present, then register the HTTP routes."""
    super(MyWebService, self).__init__()
    self.filterKeywordsAndScores = self.config.get_keywords('keywords')
    self.toppics_for_group = {}
    self.all_toppics = {}
    self.sorted_toppics_list = []
    self.data_dir = data_dir_path()

    self.watcher = DirectoryWatcher(self.data_dir, self._data_file_update)
    self.watcher.run()

    # Process JSON files that existed before the watcher started.
    for entry in os.listdir(self.data_dir):
        if entry.endswith('.json'):
            self._data_file_update(FileSystemEvent(os.path.join(self.data_dir, entry)))

    self.router.add_route('GET', '/', self.toppics)
    self.router.add_route('GET', '/zufang/all', self.toppics)
def test_event_type(self):
    """event_type reflects the constructor argument verbatim."""
    deleted = FileSystemEvent(EVENT_TYPE_DELETED, path_1, False)
    created = FileSystemEvent(EVENT_TYPE_CREATED, path_2, True)
    assert_equal(EVENT_TYPE_DELETED, deleted.event_type)
    assert_equal(EVENT_TYPE_CREATED, created.event_type)
def test___str__(self):
    """__str__ renders the event type, source path and directory flag."""
    event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, False)
    expected = '<FileSystemEvent: event_type=%s, src_path=%s, is_directory=%s>' \
        % (EVENT_TYPE_MODIFIED, path_1, False)
    assert_equal(expected, event.__str__())
def test___ne__(self):
    """Events that differ only in source path compare unequal."""
    first = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    second = FileSystemEvent(EVENT_TYPE_MODIFIED, path_2, True)
    assert_true(first.__ne__(second))
def test___init__(self):
    """The constructor stores type, path and directory flag verbatim."""
    event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    assert_equal(path_1, event.src_path)
    assert_equal(EVENT_TYPE_MODIFIED, event.event_type)
    assert_equal(True, event.is_directory)
def test___hash__(self):
    """Equal events hash alike; events with different paths do not."""
    base = FileSystemEvent(EVENT_TYPE_DELETED, path_1, False)
    twin = FileSystemEvent(EVENT_TYPE_DELETED, path_1, False)
    other = FileSystemEvent(EVENT_TYPE_DELETED, path_2, False)
    assert_equal(base.__hash__(), twin.__hash__())
    assert_not_equal(base.__hash__(), other.__hash__())
def test_behavior_readonly_public_attributes(self):
    """All public attributes of a FileSystemEvent are read-only."""
    modified_event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    assert_readonly_public_attributes(modified_event)
def trigger_event():
    """Build a throwaway filesystem event for a fixed test path."""
    test_path = "my/test/path.txt"
    return FileSystemEvent(test_path)
def run(mode, source, destination, sftp_host, sftp_port, hostkey, userkey, keypass=None, username=None, password=None, logfile=None, loglevel='INFO', stopsync=threading.Event(), **kargs):
    """Run a two-way file sync between *source* and a destination.

    mode 'local' syncs against another local directory *destination*;
    mode 'remote' syncs against an SFTP server described by sftp_host /
    sftp_port / hostkey / userkey (+ optional credentials). Blocks until
    *stopsync* is set, then shuts down observers and worker threads.

    NOTE(review): the `loglevel` parameter is immediately clobbered to
    "DEBUG" on the next line — looks like leftover debugging; confirm.
    NOTE(review): `stopsync=threading.Event()` is a mutable default shared
    across calls; callers that rely on the default share one Event.
    NOTE(review): `sync_all_thread`, `poll_thread` and `thread_lock` are
    not defined in this function — presumably module-level globals set by
    `sync_all_files` / `poll_events`; verify before refactoring.
    """
    loglevel = "DEBUG"
    # Two loggers: "sync" for user-visible progress, "event" for raw events.
    sync_logger = logging.getLogger("sync")
    event_logger = logging.getLogger("event")
    sync_logger.setLevel(getattr(logging, loglevel))
    event_logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(levelname)s: %(asctime)s %(message)s')
    ch = logging.StreamHandler()
    ch.setLevel(getattr(logging, loglevel))
    ch.setFormatter(formatter)
    # Either a file handler OR the stream handler is attached — not both.
    if logfile:
        fh = logging.FileHandler(logfile, mode='w')
        fh.setLevel(getattr(logging, loglevel))
        fh.setFormatter(formatter)
        sync_logger.addHandler(fh)
    else:
        sync_logger.addHandler(ch)
    # Raw event log always goes to a file named "events" (truncated each run).
    event_logger.addHandler(logging.FileHandler("events", mode="w"))
    # Silence paramiko transport chatter.
    paramiko_logger = logging.getLogger("paramiko.transport")
    paramiko_logger.addHandler(logging.NullHandler())
    # Messages contain "<br />" — presumably rendered into HTML somewhere.
    sync_logger.info("Connect source and destination ...<br />")
    local = OSFileSystem(root=source)
    remote = None
    if mode == 'local':
        remote = OSFileSystem(root=destination)
    elif mode == 'remote':
        try:
            client = SFTPClient.connect(sftp_host, sftp_port, hostkey,
                                        userkey, keypass, username, password)
        except:
            # NOTE(review): bare except — also catches KeyboardInterrupt;
            # acceptable here only because it cleans up and re-raises.
            sync_logger.error("Connection failed!<br />")
            local.observer.stop()
            local.observer.join()
            raise
        remote = SFTPFileSystem(client)
    if not remote:
        sync_logger.error("Connection failed!<br />")
        raise Exception("Connection failed.")
    # Worker draining the local event queue into the remote side.
    sync_events_thread = threading.Thread(
        target=sync_events,
        args=[local, remote, local.eventQueue, stopsync, thread_lock])
    sync_events_thread.name = "SyncEvents"
    # Initial full sync in both directions, then start polling for changes.
    sync_all_files(local, remote, local.root)
    sync_all_files(remote, local, remote.root)
    poll_events(local, remote, stopsync)
    sync_events_thread.start()
    #print threading.enumerate()
    # Block until the caller signals shutdown.
    while not stopsync.isSet():
        time.sleep(1)
    # Shutdown: stop observers, unblock the sync thread with a stop event.
    if mode == 'local':
        remote.observer.stop()
    local.observer.stop()
    local.eventQueue.put(FileSystemEvent("SyncStopEvent", ""))
    sync_events_thread.join()
    if sync_all_thread:
        sync_all_thread.cancel()
        sync_all_thread.join()
    if poll_thread:
        poll_thread.cancel()
        poll_thread.join()
    local.observer.join()
def startup_cmd(self):
    """Emit a synthetic 'startup' directory event rooted at local_root."""
    startup_event = FileSystemEvent('startup', self.profile['local_root'], True)
    self.cmd_create(startup_event)
def test_behavior_readonly_public_attributes(self):
    """Setting any public attribute on an event raises AttributeError."""
    event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    for attribute in list_attributes(event):
        self.assertRaises(AttributeError, setattr, event, attribute, None)
def test___eq__(self):
    """Events built from identical arguments compare equal."""
    left = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    right = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    self.assertTrue(left.__eq__(right))
def do_first_run_event(name: str) -> None:
    """Dispatch a synthetic first-run event for *name*."""
    first_run_event = FileSystemEvent(name)
    first_run_event.event_type = _FIRST_RUN_EVENT
    self._on_event_thrown(first_run_event)
def test_is_directory(self):
    """is_directory mirrors the boolean passed to the constructor."""
    dir_event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, True)
    file_event = FileSystemEvent(EVENT_TYPE_MODIFIED, path_1, False)
    assert_true(dir_event.is_directory)
    assert_false(file_event.is_directory)
def test_src_path(self):
    """src_path is stored exactly as given."""
    first = FileSystemEvent(EVENT_TYPE_CREATED, path_1, True)
    second = FileSystemEvent(EVENT_TYPE_CREATED, path_2, False)
    assert_equal(path_1, first.src_path)
    assert_equal(path_2, second.src_path)
def test_restarter_triggers_event():
    """A filesystem change routed through Restarter sets the restart flag."""
    restart_event = threading.Event()
    watcher = reloader.Restarter(restart_event)
    watcher.on_any_event(FileSystemEvent(src_path='./app.py'))
    assert restart_event.is_set()
def generate_events(count: int, name: str, sleep: float):
    """Dispatch *count* events for *name*, pausing *sleep* seconds after each."""
    remaining = count
    while remaining > 0:
        handler.dispatch(FileSystemEvent(name))
        time.sleep(sleep)
        remaining -= 1
from watchdog.events import FileSystemEvent

# BUG FIX: FileSystemEvent requires a src_path argument; calling it with no
# arguments raises TypeError. Use the current directory as a placeholder.
# NOTE(review): the name `handler` is misleading — this is an event, not an
# event handler; kept unchanged in case other code references it.
handler = FileSystemEvent(".")
print('End')