Example #1
    def __setupInotifyMonitor(self, path, inotify_types, glob_pattern):

        '''

        Initializes an inotify monitor process on ``path`` and yields the
        defined ``inotify_types`` generated on the paths matching the
        ``glob_pattern``.

        :param str path: The path to monitor
        :param list inotify_types: A list of inotify types to monitor
        :param str glob_pattern: Glob pattern that paths must match in order to be yielded.
        :return: generator

        '''

        file_exists = True
        i = Inotify()
        i.add_watch(path)

        while file_exists and self.loop():
            for event in i.event_gen(yield_nones=False):
                for inotify_type in event[1]:
                    if inotify_type in inotify_types or inotify_types == []:
                        abs_path = os.path.abspath("%s/%s" % (event[2], event[3]))
                        if fnmatch.fnmatch(abs_path, glob_pattern):
                            yield abs_path.rstrip('/'), inotify_type
                    if inotify_type == "IN_DELETE_SELF":
                        file_exists = False
                        break
                if not file_exists:
                    # The watched path is gone; leave the event loop so the
                    # outer condition can end the generator.
                    break
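
The positional indexing above (event[1], event[2], event[3]) corresponds to the 4-tuple that inotify.adapters.Inotify.event_gen() yields: (header, type_names, watch_path, filename). A minimal standalone sketch of that structure, assuming the inotify package is installed; the /tmp/watched directory is illustrative only:

# Minimal sketch; /tmp/watched is an illustrative path.
import os
from inotify.adapters import Inotify

os.makedirs('/tmp/watched', exist_ok=True)

i = Inotify()
i.add_watch('/tmp/watched')

# With yield_nones=False the generator blocks instead of yielding None while idle.
for event in i.event_gen(yield_nones=False):
    header, type_names, watch_path, filename = event
    print(type_names, os.path.join(watch_path, filename))
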
Example #2
def main():
    from inotify.adapters import Inotify
    i = Inotify()
    i.add_watch('.')
    watched_files = sys.argv[1:]
    # watched_file = (sys.argv + [None])[1]
    # import os
    # common = os.path.commonpath([os.getcwd(), watched_file])
    # watched_file = watched_file[1+len(common):]
    # print(common, watched_file)
    with kernel() as k:
        for f in watched_files:
            try:
                processed_to_stdout(k.process(open(f, 'r').read()))
            except FileNotFoundError:
                pass
        for event in i.event_gen(yield_nones=False):
            (_, event_type, _, filename) = event
            # print(event_type, filename)

            if event_type == ['IN_CLOSE_WRITE'] and filename in watched_files:
                processed_to_stdout(k.process(open(filename, 'r').read()))

            if event_type == ['IN_CLOSE_WRITE'] and filename == '.requests':
                try:
                    request, body = open(filename, 'r').read().split('\n', 1)
                    handle_request(k, body, *request.split(' '))
                except Exception:
                    print('Invalid request:', str(open(filename, 'r').read()).split('\n')[0])
                    import traceback
                    print(traceback.format_exc())
                    continue
Example #3
    def _get_events(self):
        inotify_watcher = Inotify()
        inotify_watcher.add_watch(self.partialsPath, mask=self.MASK)
        for event in inotify_watcher.event_gen():
            if not event:
                continue
            yield event
Example #4
        def __init__(self, extra_files=None, callback=None):
            super().__init__()
            self.setDaemon(True)
            self._callback = callback
            self._dirs = set()
            self._watcher = Inotify()

            for extra_file in (extra_files or ()):
                self.add_extra_file(extra_file)
Example #5
    def __init__(self, path, mask=IN_ALL_EVENTS, block_duration_s=1):

        self.__root_path = path

        # No matter what we actually received as the mask, make sure we have
        # the minimum that we require to curate our list of watches.
        self.__mask = mask | IN_ISDIR | IN_CREATE | IN_DELETE

        self.__i = Inotify(block_duration_s=block_duration_s)

        self.__load_tree(path)
Example #6
    def __init__(self):
        self._inotify = Inotify()
        self._inotify_mask = IN_ALL_EVENTS & (
            ~IN_ACCESS & ~IN_OPEN & ~IN_CLOSE_NOWRITE & ~IN_CLOSE_WRITE)

        self._dir_queue = multiprocessing.Queue()
        self._sync_queue = multiprocessing.Queue()
        self._watched_dirs = {}  # {lib_name: set(dirs)}

        super().__init__(target=self._main,
                         args=(self._dir_queue, self._sync_queue))
Example #7
    def watch(self):
        print(
            "--- Orchestator mode engaged. Watching for changes in SFC configuration at "
            + str(self._sfc_config_directory))
        i = Inotify()
        i.add_watch(str(self._sfc_config_directory))

        for event in i.event_gen(yield_nones=False):
            (_, type_name, path, filename) = event

            # if changes have been written to the file
            if 'IN_CLOSE_WRITE' in type_name and self._sfc_config_filename == filename:
                print("Write-event to {}{} registered {}, reevaluating...\n".
                      format(path, filename, type_name))

                new_sfcs = self.read_json_config(file=self._sfc_config_path)
                print('New SFCs: {}\n'.format(
                    simplejson.dumps(new_sfcs, indent=4)))

                deleted_sfcs = [x for x in self.sfcs if x not in new_sfcs]

                for sfc_id in deleted_sfcs:
                    print('Delete SFC {}:'.format(sfc_id))

                    if '/' in str(
                            self.sfcs[sfc_id]['trafficType']['ipAddress']):
                        split = str(self.sfcs[sfc_id]['trafficType']
                                    ['ipAddress']).split('/')
                        ip_address = split[0]
                        prefix_length = split[1]
                    else:
                        print(
                            'Traffic destination IP given without prefix! Aborting...'
                        )
                        exit(1)

                    # Traffic from customer
                    self._ingress_switch_controller.delete_mpls_sr_rule(
                        self._label_stacks[sfc_id], 'src', ip_address,
                        prefix_length)

                    label_stacks_backwards = self.generate_label_stacks_backwards(
                    )
                    # Traffic to customer
                    self._ingress_switch_controller.delete_mpls_sr_rule(
                        label_stacks_backwards[sfc_id], 'dst', ip_address,
                        prefix_length)

                self.apply_sfc_policies(new_sfcs)

                print(
                    "----- Orchestator mode engaged. Watching for changes in SFC configuration at {} ----- "
                    .format(self._sfc_config_directory))
Example #8
    def tail(self):
        watch_mask = (constants.IN_ALL_EVENTS ^ constants.IN_ACCESS ^
                      constants.IN_OPEN ^ constants.IN_CLOSE_NOWRITE)
        i = Inotify()
        i.add_watch(self.filename, mask=watch_mask)
        while True:
            # Consume as much as possible.
            yield from self.resume()

            # Now wait for a change that we can react to
            ev = self.wait_actionable(i)

            if ev == 'append':
                continue

            if ev == 'swap':
                # Need to reach around since file-deletion removes C watches,
                # but not the python one...
                try:
                    i.remove_watch(self.filename)
                except Exception:
                    pass
                i.add_watch(self.filename, mask=watch_mask)
                self.pos = 1
                continue
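
The 'swap' branch above is the general rotation-handling pattern with this library: deleting or renaming the watched file drops the kernel watch, so the watch has to be re-added by path once the new file exists. A minimal, self-contained sketch of that pattern, assuming the inotify package; the file name 'app.log' is illustrative:

# Re-watch sketch; assumes a rotated 'app.log' is recreated before the watch is re-added.
from inotify.adapters import Inotify
from inotify import constants

LOG = 'app.log'
MASK = constants.IN_MODIFY | constants.IN_MOVE_SELF | constants.IN_DELETE_SELF

i = Inotify()
i.add_watch(LOG, mask=MASK)

for _, type_names, _, _ in i.event_gen(yield_nones=False):
    if 'IN_MOVE_SELF' in type_names or 'IN_DELETE_SELF' in type_names:
        # The kernel watch is gone; drop the stale Python-side entry and
        # watch the fresh file at the same path.
        try:
            i.remove_watch(LOG)
        except Exception:
            pass
        i.add_watch(LOG, mask=MASK)
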
Example #9
    class InotifyReloader(threading.Thread):
        event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE
                      | inotify.constants.IN_DELETE_SELF
                      | inotify.constants.IN_MODIFY
                      | inotify.constants.IN_MOVE_SELF
                      | inotify.constants.IN_MOVED_FROM
                      | inotify.constants.IN_MOVED_TO)

        def __init__(self, extra_files=None, callback=None):
            super().__init__()
            self.setDaemon(True)
            self._callback = callback
            self._dirs = set()
            self._watcher = Inotify()

            for extra_file in (extra_files or ()):
                self.add_extra_file(extra_file)

        def add_extra_file(self, filename):
            dirname = os.path.dirname(filename)

            if dirname in self._dirs:
                return

            self._watcher.add_watch(dirname, mask=self.event_mask)
            self._dirs.add(dirname)

        def get_dirs(self):
            fnames = [
                os.path.dirname(COMPILED_EXT_RE.sub('py', module.__file__))
                for module in tuple(sys.modules.values())
                if getattr(module, '__file__', None)
            ]

            return set(fnames)

        def run(self):
            self._dirs = self.get_dirs()

            for dirname in self._dirs:
                self._watcher.add_watch(dirname, mask=self.event_mask)

            for event in self._watcher.event_gen():
                if event is None:
                    continue

                filename = event[3]

                self._callback(filename)
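
A minimal usage sketch for a reloader thread like the one above; the callback and the extra file path are illustrative only:

def _restart(filename):
    print(f"{filename} changed; reloading")

reloader = InotifyReloader(extra_files=['conf/settings.py'], callback=_restart)
reloader.start()  # daemon thread: watches the directories of every loaded module
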
Example #10
    def run(self):
        watch_mask = constants.IN_ALL_EVENTS

        print("Starting FsMonitor")
        i = Inotify()
        i.add_watch('gossip_store', mask=watch_mask)
        for event in i.event_gen(yield_nones=False):
            (e, type_names, path, filename) = event
            if e.mask & constants.IN_DELETE_SELF:
                i.remove_watch('gossip_store')
                i.add_watch('gossip_store', mask=watch_mask)
Example #11
    def __init__(self, sets: Sets) -> None:
        self.se = sets

        self.addons = ["venv"]

        unknown_addons = [a for a in self.se.addons if a not in self.addons]
        if unknown_addons:
            raise RuntimeError(f"Unknown addons {unknown_addons}")

        self.inotify = Inotify()

        self._environ_before: Dict[str, Any] = os.environ.copy()

        self.shell = Shell.create()

        self.env_dirs = self._get_env_dirs()
Example #12
def run_inotify(watch_dir, move_to_dir):
    logging.info("Starting move_and_process, watch: %s, move_to: %s",
                 watch_dir, move_to_dir)
    i = Inotify(block_duration_s=300)
    i.add_watch(watch_dir, constants.IN_MOVED_TO)
    for evt in i.event_gen():
        if evt is None:
            # Call every block_duration_s seconds when there is nothing else to do
            run_periodic(watch_dir, move_to_dir)
            continue
        (_header, type_names, _path, fn) = evt
        if 'IN_ISDIR' not in type_names or not fn.isdigit():
            logging.info('Skipped %s' % fn)
            continue
        logging.info('Found %s' % fn)
        handle_file(watch_dir, move_to_dir, int(fn))
Example #13
    class InotifyReloader(threading.Thread):
        event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE
                      | inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY
                      | inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM
                      | inotify.constants.IN_MOVED_TO)

        def __init__(self, extra_files=None, callback=None):
            super().__init__()
            self.setDaemon(True)
            self._callback = callback
            self._dirs = set()
            self._watcher = Inotify()

            for extra_file in (extra_files or ()):
                self.add_extra_file(extra_file)

        def add_extra_file(self, filename):
            dirname = os.path.dirname(filename)

            if dirname in self._dirs:
                return

            self._watcher.add_watch(dirname, mask=self.event_mask)
            self._dirs.add(dirname)

        def get_dirs(self):
            fnames = [
                os.path.dirname(COMPILED_EXT_RE.sub('py', module.__file__))
                for module in tuple(sys.modules.values())
                if getattr(module, '__file__', None)
            ]

            return set(fnames)

        def run(self):
            self._dirs = self.get_dirs()

            for dirname in self._dirs:
                self._watcher.add_watch(dirname, mask=self.event_mask)

            for event in self._watcher.event_gen():
                if event is None:
                    continue

                filename = event[3]

                self._callback(filename)
Example #14
    def __init__(self, sets: Sets) -> None:
        self.se = sets

        self.addons = ["venv"]

        unknown_addons = [a for a in self.se.addons if a not in self.addons]
        if unknown_addons:
            raise EnvoError(f"Unknown addons {unknown_addons}")

        self.inotify = Inotify()

        self.env_dirs = self._get_env_dirs()
        self.quit: bool = False

        self.environ_before = os.environ.copy()  # type: ignore

        self._set_context_thread: Optional[Thread] = None
Example #15
        def __init__(self, extra_files=None, callback=None):
            super().__init__()
            self.setDaemon(True)
            self._callback = callback
            self._dirs = set()
            self._watcher = Inotify()

            for extra_file in (extra_files or ()):
                self.add_extra_file(extra_file)
Example #16
    def run(self):
        logging.info("Running notify thread. First scan files from last run")
        for f in os.listdir(self.folder):
            if os.path.isfile(os.path.join(self.folder, f)):
                logging.info("File %s noticed as unprocessed by notify thread for project %s in %s",
                             f, self.project_id, self.folder)
                processing.Processing.process_incoming_file(self.folder, f, self.project_id)

        logging.info("Scan complete, start watching events")
        i = Inotify()
        i.add_watch(self.folder)

        for event in i.event_gen(yield_nones=False):
            (header, type_names, watch_path, filename) = event
            if 'IN_CLOSE_WRITE' in type_names:
                logging.info("File %s noticed by notify thread for project %s in %s",
                             filename, self.project_id, watch_path)
                processing.Processing.process_incoming_file(watch_path, filename, self.project_id)
Example #17
def run_daemon(args):
    # Main loop waiting on inotify file events
    inotify = Inotify(block_duration_s=1)  # event_gen blocks for 1 second
    inotify.add_watch(args.synapse_service_dir.encode(),
                      IN_MOVED_TO | IN_MODIFY)
    services_by_dependencies_time = 0

    for event in inotify.event_gen():  # blocks for only up to 1 second at a time
        if services_by_dependencies_time + args.update_secs < time.time():
            services_by_dependencies = smartstack_dependencies_of_running_firewalled_services(
                soa_dir=args.soa_dir, )
            services_by_dependencies_time = time.time()

        if event is None:
            continue

        process_inotify_event(event, services_by_dependencies, args.soa_dir,
                              args.synapse_service_dir)
Example #18
class InotifyRecursive(object):
    def __init__(self, path, mask=IN_ALL_EVENTS, block_duration_s=1):

        self.__root_path = path

        # No matter what we actually received as the mask, make sure we have
        # the minimum that we require to curate our list of watches.
        self.__mask = mask | IN_ISDIR | IN_CREATE | IN_DELETE

        self.__i = Inotify(block_duration_s=block_duration_s)

        self.__load_tree(path)

    def __load_tree(self, path):
        q = [path]
        while q:
            current_path = q[0]
            del q[0]

            self.__i.add_watch(current_path, self.__mask)

            for filename in os.listdir(current_path):
                entry_filepath = os.path.join(current_path, filename)
                if os.path.isdir(entry_filepath) is False:
                    continue

                q.append(entry_filepath)

    def event_gen(self):
        for event in self.__i.event_gen():
            if event is not None:
                (header, type_names, path, filename) = event

                if header.mask & IN_ISDIR:
                    full_path = os.path.join(path, filename)
                    if header.mask & IN_CREATE:
                        self.__i.add_watch(full_path, self.__mask)

                        # Also watch subdirectories created before this watch was added,
                        # which inotify would otherwise miss.
                        for root, dirs, files in os.walk(full_path):
                            for name in dirs:
                                self.__i.add_watch(os.path.join(root, name))

                    elif header.mask & IN_DELETE:
                        self.__i.remove_watch(full_path, superficial=True)
            yield event
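
The hand-rolled recursive watcher above closely parallels what the library already provides as inotify.adapters.InotifyTree, which adds watches for new subdirectories as they appear. A minimal sketch, assuming the inotify package and an illustrative existing directory /tmp/project:

# Recursive watching via the package's built-in adapter; the path is illustrative.
from inotify.adapters import InotifyTree

tree = InotifyTree('/tmp/project')
for header, type_names, path, filename in tree.event_gen(yield_nones=False):
    print(type_names, path, filename)
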
Example #19
    def __init__(self):
        self._inotify = Inotify()
        self._inotify_mask = IN_ALL_EVENTS & (~IN_ACCESS &
                                              ~IN_OPEN &
                                              ~IN_CLOSE_NOWRITE &
                                              ~IN_CLOSE_WRITE)

        self._dir_queue = multiprocessing.Queue()
        self._sync_queue = multiprocessing.Queue()
        self._watched_dirs = {}   # {lib_name: set(dirs)}

        super().__init__(target=self._main, args=(self._dir_queue,
                                                  self._sync_queue))
Example #20
    class InotifyReloader(Reloader):

        event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE
                      | inotify.constants.IN_DELETE_SELF
                      | inotify.constants.IN_MODIFY
                      | inotify.constants.IN_MOVE_SELF
                      | inotify.constants.IN_MOVED_FROM
                      | inotify.constants.IN_MOVED_TO)

        def __init__(self, extra_files=None, callback=None):
            super(InotifyReloader, self).__init__(extra_files, callback)
            self.watcher = Inotify()

        def get_files_or_dir(self):
            dirs = set([os.path.dirname(path) for path in self.file])
            self.file = dirs

        def add_extra(self, extra):
            dirname = os.path.dirname(extra)

            if dirname in self.file:
                return

            self.watcher.add_watch(dirname, mask=self.event_mask)
            self.file.add(dirname)

        def run(self):
            for dirname in self.file:
                self.watcher.add_watch(dirname, mask=self.event_mask)

            for event in self.watcher.event_gen():
                if event is None:
                    continue
                filename = event[3]

                self.callback(filename)
Example #21
def inotify(args):
    "Setup to run inotify loop"
    setup_logging(args)
    inot = Inotify()
    inot.add_watch(str(args.session), mask=IN_CREATE | IN_DELETE)
    global hooks
    if args.hooks:
        hooks = import_user(args.hooks)
    else:
        hooks = None
    try:
        con = sqlite3.connect(str(args.sql_file),
                              detect_types=sqlite3.PARSE_DECLTYPES)
        logging.info("Repopulating database.")
        populate_session_tbl(con, args.session, args.no_action, args=args)
        queues = OrderedDict((('create', deque()), ('retry_create', deque()),
                              ('remove', deque())))
        qfuncs = {
            'create': qfunc_create,
            'retry_create': qfunc_retry_create,
            'remove': hooks_and_remove_torrent
        }
        inot_funcs = {
            ('torrent', ('IN_CREATE', )): queues['create'].append,
            ('torrent', ('IN_DELETE', )): queues['remove'].append
        }
        complete_hook = getattr(hooks, 'complete', None)
        if complete_hook:
            queues['complete'] = deque()
            inot_funcs[('complete',
                        ('IN_CREATE', ))] = queues['complete'].append
            qfuncs['complete'] = complete_hook
        logging.info("Entering inotify loop.")
        preloop_hook = getattr(hooks, 'pre_loop', None)
        if preloop_hook:
            preloop_hook(con, inot, args, queues, qfuncs, inot_funcs)
        queues.move_to_end('remove')
        while True:
            try:
                inotify_loop(con, inot, args, queues, qfuncs, inot_funcs)
            except rTorEventException as e:
                logging.exception("Something happened.")
            except (KeyboardInterrupt, SystemExit):
                logging.info("Exiting due to interrupt.")
                raise
            except Exception:
                logging.exception("Unhandled exception.")
                raise
    finally:
        postloop_hook = getattr(hooks, 'post_loop', None)
        if postloop_hook:
            postloop_hook(con, inot, args)
        inot.remove_watch(bytes(args.session))
        con.close()
Example #22
def watch_prefix_file(file_name):
    """ Using inotify function is looking for IN_CLOSE_WRITE events, that happens when pmacct is pushing new data to
        _PMACCT_DATA file. write_to_db is called to store new data into database. On every iteration main thread
        status is checked.
    """
    inotify_obj = Inotify()
    inotify_obj.add_watch(file_name)
    try:
        for event in inotify_obj.event_gen():
            if event is not None:
                if event[1] == ['IN_CLOSE_WRITE']:
                    logger.debug("Found IN_CLOSE_WRITE event")
                    write_to_db()
            else:
                if not main_thread().is_alive():
                    logger.error('Main thread died, stopping all child threads')
                    # Canceling Timer thread
                    timer_obj.cancel()
                    # Breaking watcher thread loop
                    break
    finally:
        inotify_obj.remove_watch(file_name)
Example #23
def main():
    monitor = Inotify()
    monitor_messaging_manager = MessagingManager(MessagingManagerType.SERVER, "tcp://127.0.0.1:5555")
    watch_paths_list = [ b'/home/anton/test/' ]
    pair_events_list = []
    stop_events_processing_flag = Event()

    for path in watch_paths_list:
        monitor.add_watch(path, inotify_constants.IN_MOVE | inotify_constants.IN_MOVED_TO | inotify_constants.IN_MOVED_FROM | inotify_constants.IN_CREATE)

    try:      
        while True:
            messages = monitor_messaging_manager.get_all_received_messages()
            process_received_messages(monitor, messages)
            
            stop_events_processing_timer = Timer(10, stop_events_processing_flag.set)
            stop_events_processing_timer.start()
            process_received_events(monitor, monitor_messaging_manager, pair_events_list, stop_events_processing_flag)

    except KeyboardInterrupt:
        for path in watch_paths_list:
            monitor.remove_watch(path)
        raise SystemExit()
Example #24
#!/usr/bin/env python3

from pathlib import Path
import sched
import signal
from time import strftime, localtime, time, sleep, mktime, strptime
from exif import Image
from inotify.adapters import Inotify
import magic

i = Inotify()
i.add_watch('/mnt/current')
s = sched.scheduler(time)

signal.signal(signal.SIGTERM, signal.default_int_handler)


def get_creation_time(file):
    if magic.from_file(str(file), mime=True).startswith('image/'):
        try:
            with file.open('rb') as content:
                exif = Image(content)
                return mktime(strptime(exif.datetime, "%Y:%m:%d %H:%M:%S"))
        except Exception:
            # Fall back to the filesystem mtime if EXIF data is missing or unparsable.
            return file.stat().st_mtime
    else:
        return file.stat().st_mtime


def do_move(file):
    creation_time = get_creation_time(file)
Example #25
        levelnum = logging.INFO

    logging.basicConfig(level=levelnum)

    # Load up system information into a Node object representing this host
    localnode = Node(localhost, ifaddrs(), bool(sysctl_ipv4_forward()))
    network.add_node(localnode)

    # Dump this host's information to a file on the share
    with open(path.join(sharedir, localhost + '.json'), 'w') as f:
        json.dump(localnode, f, cls=Encoder)
    logging.debug("Wrote localhost node inforamtion to %s",
                  path.join(sharedir, localhost + '.json'))

    # Add a filesystem watcher to get notified of changes
    notify = Inotify()
    notify.add_watch(sharedir, IN_CLOSE_WRITE)

    # Load the current state of every written node
    # This is done after adding the watcher to prevent race conditions
    for name, node in load_nodes(sharedir).items():
        network.add_node(node)
    logging.info("Loaded %d nodes: %s", len(network.nodes),
                 " ".join(n.name for n in network.nodes.values()))

    update_routes()

    # Set up a DNS server to resolve names across subnets
    resolver = DnsResolver(network, localhost)
    dnsserver = DNSServer(resolver, port=dnsport, address=dnsaddr)
    dnsserver.start_thread()
Example #26
class Envo:
    @dataclass
    class Sets:
        stage: str
        addons: List[str]
        init: bool

    root: Path
    stage: str
    selected_addons: List[str]
    addons: List[str]
    files_watchdog_thread: Thread

    def __init__(self, sets: Sets) -> None:
        self.se = sets

        self.addons = ["venv"]

        unknown_addons = [a for a in self.se.addons if a not in self.addons]
        if unknown_addons:
            raise RuntimeError(f"Unknown addons {unknown_addons}")

        self.inotify = Inotify()

        self._environ_before: Dict[str, Any] = os.environ.copy()

        self.shell = Shell.create()

        self.env_dirs = self._get_env_dirs()

    def spawn_shell(self) -> None:
        sys.path.insert(0, str(self.env_dirs[0].parent))
        self._start_files_watchdog()
        self.send_env_to_shell()
        self.shell.start()

    def send_env_to_shell(self) -> None:
        try:
            env: Env = self.get_env()
            env_prefix = f"{env.meta.emoji}({env.get_full_name()})"
            env.validate()
            env.activate()
            self.shell.set_variable("env", env)
            self.shell.set_variable("environ", os.environ)
            self.shell.environ.update(os.environ)

            self.shell.set_prompt_prefix(env_prefix)
        except Env.EnvException as exc:
            logger.error(exc)
            self.shell.set_prompt_prefix(f"❌{env_prefix})")
        except Exception:
            from traceback import print_exc

            print_exc()

    def _files_watchdog(self) -> None:
        for event in self.inotify.event_gen(yield_nones=False):
            (_, type_names, path, filename) = event
            if "IN_CLOSE_WRITE" in type_names:
                logger.info(f'\nDetected changes in "{str(path)}".')
                logger.info("Reloading...")
                self.send_env_to_shell()
                print("\r" + self.shell.formated_prompt, end="")

    def _start_files_watchdog(self) -> None:
        for d in self.env_dirs:
            comm_env_file = d / "env_comm.py"
            env_file = d / f"env_{self.se.stage}.py"
            self.inotify.add_watch(str(comm_env_file))
            self.inotify.add_watch(str(env_file))

        self.files_watchdog_thread = Thread(target=self._files_watchdog)
        self.files_watchdog_thread.start()

    def _get_env_dirs(self) -> List[Path]:
        ret = []
        path = Path(".").absolute()
        while True:
            env_file = path / f"env_{self.se.stage}.py"
            if env_file.exists():
                ret.append(path)
            else:
                if path == Path("/"):
                    break
            path = path.parent

        return ret

    def _create_init_files(self) -> None:
        for d in self.env_dirs:
            init_file = d / "__init__.py"

            if init_file.exists():
                init_file_tmp = d / Path("__init__.py.tmp")
                init_file_tmp.touch()
                init_file_tmp.write_text(init_file.read_text())

            if not init_file.exists():
                init_file.touch()

            init_file.write_text("# __envo_delete__")

    def _delete_init_files(self) -> None:
        for d in self.env_dirs:
            init_file = d / Path("__init__.py")
            init_file_tmp = d / Path("__init__.py.tmp")

            if init_file.read_text() == "# __envo_delete__":
                init_file.unlink()

            if init_file_tmp.exists():
                init_file.touch()
                init_file.write_text(init_file_tmp.read_text())
                init_file_tmp.unlink()

    def _unload_modules(self) -> None:
        modules = list(sys.modules.keys())[:]
        for m in modules:
            for d in self.env_dirs:
                package = d.name
                if m.startswith(package):
                    sys.modules.pop(m)

    def get_env(self) -> Env:
        env_dir = self.env_dirs[0]
        package = env_dir.name
        env_name = f"env_{self.se.stage}"
        env_file = env_dir / f"{env_name}.py"

        module_name = f"{package}.{env_name}"

        with ILock("envo_lock"):
            self._create_init_files()

            self._unload_modules()

            try:
                module = import_module_from_file(env_file)
                env: Env
                env = module.Env()
                return env
            except ImportError as exc:
                logger.error(f"""Couldn't import "{module_name}" ({exc}).""")
                raise
            finally:
                self._delete_init_files()

    def _create_from_templ(self,
                           templ_file: Path,
                           output_file: Path,
                           is_comm: bool = False) -> None:
        Environment(keep_trailing_newline=True)
        template = Template((templates_dir / templ_file).read_text())
        if output_file.exists():
            logger.error(f"{str(output_file)} file already exists.")
            exit(1)

        output_file.touch()
        env_dir = Path(".").absolute()
        package_name = comm.dir_name_to_pkg_name(env_dir.name)
        class_name = comm.dir_name_to_class_name(package_name) + "Env"

        context = {
            "class_name": class_name,
            "name": env_dir.name,
            "package_name": package_name,
            "stage": self.se.stage,
            "emoji": stage_emoji_mapping[self.se.stage],
            "selected_addons": self.se.addons,
        }

        if not is_comm:
            context["stage"] = self.se.stage

        output_file.write_text(template.render(**context))

    def init_files(self) -> None:
        env_comm_file = Path("env_comm.py")

        if not env_comm_file.exists():
            self._create_from_templ(Path("env_comm.py.templ"),
                                    env_comm_file,
                                    is_comm=True)

        env_file = Path(f"env_{self.se.stage}.py")
        self._create_from_templ(Path("env.py.templ"), env_file)
        logger.info(f"Created {self.se.stage} environment 🍰!")

    def handle_command(self, args: argparse.Namespace) -> None:
        if args.version:
            from envo.__version__ import __version__

            logger.info(__version__)
            return

        if args.init:
            self.init_files()
            return

        if args.save:
            self.get_env().dump_dot_env()
            return

        if args.dry_run:
            self.get_env().print_envs()
        else:
            self.spawn_shell()
Example #27
class Monitor(multiprocessing.Process):
    def __init__(self):
        self._inotify = Inotify()
        self._inotify_mask = IN_ALL_EVENTS & (
            ~IN_ACCESS & ~IN_OPEN & ~IN_CLOSE_NOWRITE & ~IN_CLOSE_WRITE)

        self._dir_queue = multiprocessing.Queue()
        self._sync_queue = multiprocessing.Queue()
        self._watched_dirs = {}  # {lib_name: set(dirs)}

        super().__init__(target=self._main,
                         args=(self._dir_queue, self._sync_queue))

    def _main(self, dir_queue, sync_queue):
        next_sync_t = time() + SYNC_INTERVAL
        sync_dirs = set()

        try:
            while True:
                # Check for new directories to watch
                while not dir_queue.empty():
                    lib, path = dir_queue.get()

                    watched = (path
                               in set(chain(*self._watched_dirs.values())))

                    if lib not in self._watched_dirs:
                        self._watched_dirs[lib] = set()
                    self._watched_dirs[lib].add(path)

                    if not watched:
                        self._inotify.add_watch(
                            str(path).encode(LOCAL_FS_ENCODING),
                            self._inotify_mask)
                        print("Watching {} (lib: {}) (total dirs: {})".format(
                            path, lib, len(self._watched_dirs[lib])))

                # Process Inotify
                for event in self._inotify.event_gen():
                    if event is None:
                        break

                    (header, type_names, watch_path, filename) = event
                    watch_path = Path(str(watch_path, LOCAL_FS_ENCODING))
                    filename = Path(str(filename, LOCAL_FS_ENCODING))

                    print("WD=({:d}) MASK=({:d}) "
                          "MASK->NAMES={} WATCH-PATH={} FILENAME={}".format(
                              header.wd, header.mask, type_names, watch_path,
                              filename))

                    if header.mask & (IN_ATTRIB | IN_CREATE | IN_DELETE |
                                      IN_MODIFY | IN_MOVED_TO | IN_MOVED_FROM):
                        if IN_ISDIR & header.mask and header.mask & IN_CREATE:
                            watch_path = watch_path / filename
                        elif IN_ISDIR & header.mask and header.mask & IN_DELETE:
                            self._inotify.remove_watch(
                                str(watch_path).encode(LOCAL_FS_ENCODING))

                        sync_dirs.add(watch_path)

                def _reqSync(l, d):
                    if d.exists():
                        sync_queue.put((l, d))
                        print("Requesting sync {} (lib: {})".format(d, l))

                if time() > next_sync_t:
                    for d in sync_dirs:
                        for lib in self._watched_dirs:
                            lib_paths = self._watched_dirs[lib]
                            if d in lib_paths:
                                _reqSync(lib, d)
                                if not d.exists():
                                    self._watched_dirs[lib].remove(d)
                            elif d.parent in lib_paths:
                                _reqSync(lib, d)
                                self.dir_queue.put((lib, d))

                    sync_dirs.clear()
                    next_sync_t = time() + SYNC_INTERVAL

        except KeyboardInterrupt:
            pass
        finally:
            for path in set(chain(*self._watched_dirs.values())):
                self._inotify.remove_watch(str(path).encode(LOCAL_FS_ENCODING))

    @property
    def dir_queue(self):
        return self._dir_queue

    @property
    def sync_queue(self):
        return self._sync_queue
Example #28
    def __init__(self, extra_files=None, callback=None):
        super(InotifyReloader, self).__init__(extra_files, callback)
        self.watcher = Inotify()
Example #29
class Monitor(multiprocessing.Process):

    def __init__(self):
        self._inotify = Inotify()
        self._inotify_mask = IN_ALL_EVENTS & (~IN_ACCESS &
                                              ~IN_OPEN &
                                              ~IN_CLOSE_NOWRITE &
                                              ~IN_CLOSE_WRITE)

        self._dir_queue = multiprocessing.Queue()
        self._sync_queue = multiprocessing.Queue()
        self._watched_dirs = {}   # {lib_name: set(dirs)}

        super().__init__(target=self._main, args=(self._dir_queue,
                                                  self._sync_queue))

    def _main(self, dir_queue, sync_queue):
        next_sync_t = time() + SYNC_INTERVAL
        sync_dirs = set()

        try:
            while True:
                # Check for new directories to watch
                while not dir_queue.empty():
                    lib, path = dir_queue.get()

                    watched = (path in set(chain(*self._watched_dirs.values())))

                    if lib not in self._watched_dirs:
                        self._watched_dirs[lib] = set()
                    self._watched_dirs[lib].add(path)

                    if not watched:
                        self._inotify.add_watch(str(path), self._inotify_mask)
                        log.info(f"Watching {path} (lib: {lib}) "
                                 f"(total dirs: {len(self._watched_dirs[lib])}")

                # Process Inotify
                for event in self._inotify.event_gen():
                    if event is None:
                        break

                    (header,
                     type_names,
                     watch_path,
                     filename) = event
                    watch_path = Path(watch_path)
                    filename = Path(filename)

                    log.debug(
                        f"WD=({header.wd}) MASK=({header.mask}) "
                        f"MASK->NAMES={type_names} WATCH-PATH={watch_path} FILENAME={filename}")

                    if header.mask & (IN_ATTRIB | IN_CREATE | IN_DELETE |
                                      IN_MODIFY | IN_MOVED_TO | IN_MOVED_FROM):
                        if IN_ISDIR & header.mask and header.mask & IN_CREATE:
                            watch_path = watch_path / filename
                        elif IN_ISDIR & header.mask and header.mask & IN_DELETE:
                            self._inotify.remove_watch(str(watch_path))

                        sync_dirs.add(watch_path)

                def _reqSync(l, d):
                    if d.exists():
                        sync_queue.put((l, d))
                        log.info(f"Requesting sync {d} (lib: {l})")

                if time() > next_sync_t:
                    for d in sync_dirs:
                        for lib in self._watched_dirs:
                            lib_paths = self._watched_dirs[lib]
                            if d in lib_paths:
                                _reqSync(lib, d)
                                if not d.exists():
                                    self._watched_dirs[lib].remove(d)
                            elif d.parent in lib_paths:
                                _reqSync(lib, d)
                                self.dir_queue.put((lib, d))

                    sync_dirs.clear()
                    next_sync_t = time() + SYNC_INTERVAL

        except KeyboardInterrupt:
            pass
        finally:
            for path in set(chain(*self._watched_dirs.values())):
                self._inotify.remove_watch(str(path))

    @property
    def dir_queue(self):
        return self._dir_queue

    @property
    def sync_queue(self):
        return self._sync_queue
Example #30
def process_loop(clone, label, tmp_mount, concurrent, exclude, sudo):
    """Drop into the iNotify loop continually scanning for new devices.

    Args:
        clone: list of files/folders that need to be put on formatted drives.
        label: name to apply to the formatted drives.
        tmp_mount: mount folder for the formatted device to clone files to.
        concurrent (int): number of concurrent threads for writing.
        exclude (Set[str]): file extensions to ignore while cloning.
        sudo (Command): sudo/root password (if we need it)
    """
    # what we want to do with each device
    data = {
        'clone': clone,
        'label': label,
        'tmp_mount': tmp_mount,
        'concurrent': concurrent,
        'exclude': exclude,
        'sudo': sudo
    }

    manager = CallbackManager(data)
    notify = Inotify(paths=['/dev'])

    # map the device identifier of our "original usb sticks" so
    #  we don't accidentally  overwrite something that was already plugged in.
    # However, this will not count against original devices that are
    #  unplugged and plugged back in.
    original_devices = {o: None for o in usb_storage_devices()}
    current_devices = {}

    for device in original_devices.keys():
        logger.debug('original device, %s', device)

    # track the Inotify events in a loop.
    last_events = deque(list(), maxlen=6)

    # prime the state; curry
    get_state = process_state(notify, last_events, original_devices)

    logger.info('~ready')

    try:
        while True:
            new_devices = get_state(current_devices)

            if new_devices:
                # device was added
                for device, mount in new_devices.items():
                    logger.info('found new device %s at %s', device, mount)
                    current_devices[device] = mount
                    manager.on_new_device(device, mount)
                continue

            current_usb = usb_storage_devices()

            for device in list(current_devices.keys()):
                if device not in current_usb:
                    # device was removed
                    mount = current_devices.pop(device)
                    logger.info('device was removed %s from %s', device, mount)
                    manager.on_removed_device(device, mount)
        # infinite loop
    except KeyboardInterrupt:
        manager.stop()
        raise
    except Exception as e:
        logger.error(e)
        raise
Example #31
    def __init__(self, extra_files=None, callback=None):
        super(InotifyReloader, self).__init__()
        self.setDaemon(True)
        self._callback = callback
        self._dirs = set()
        self._watcher = Inotify()
Example #32
class Envo:
    @dataclass
    class Sets:
        stage: str
        addons: List[str]
        init: bool

    environ_before: Dict[str, str]
    selected_addons: List[str]
    addons: List[str]
    files_watchdog_thread: Thread
    shell: shell.Shell
    inotify: Inotify
    env_dirs: List[Path]
    quit: bool
    env: Env

    def __init__(self, sets: Sets) -> None:
        self.se = sets

        self.addons = ["venv"]

        unknown_addons = [a for a in self.se.addons if a not in self.addons]
        if unknown_addons:
            raise EnvoError(f"Unknown addons {unknown_addons}")

        self.inotify = Inotify()

        self.env_dirs = self._get_env_dirs()
        self.quit: bool = False

        self.environ_before = os.environ.copy()  # type: ignore

        self._set_context_thread: Optional[Thread] = None

    def spawn_shell(self, type: Literal["fancy", "simple",
                                        "headless"]) -> None:
        """
        :param type: shell type
        """
        self.shell = shell.shells[type].create()
        self._start_files_watchdog()

        self.restart()
        self.shell.start()

        self._on_unload()
        self._stop_files_watchdog()

        self._on_destroy()

    def restart(self) -> None:
        try:
            os.environ = self.environ_before.copy()  # type: ignore

            if not hasattr(self, "env"):
                self.env = self.create_env()
                self._on_create()
            else:
                self._on_unload()
                self.env = self.create_env()

            self.env.validate()
            self.env.activate()
            self._on_load()
            self.shell.reset()
            self.shell.set_variable("env", self.env)
            self.shell.set_variable("environ", self.shell.environ)

            self._set_context_thread = Thread(target=self._set_context)
            self._set_context_thread.start()

            glob_cmds = [
                c for c in self.env.get_magic_functions()["command"]
                if c.kwargs["glob"]
            ]
            for c in glob_cmds:
                self.shell.set_variable(c.name, c)

            self.shell.pre_cmd = self._on_precmd
            self.shell.on_stdout = self._on_stdout
            self.shell.on_stderr = self._on_stderr
            self.shell.post_cmd = self._on_postcmd

            self.shell.environ.update(self.env.get_env_vars())
            self.shell.set_prompt_prefix(
                self._get_prompt_prefix(
                    loading=self._set_context_thread.is_alive()))

        except EnvoError as exc:
            logger.error(exc)
            self.shell.set_prompt_prefix("❌")
        except Exception:
            from traceback import print_exc

            print_exc()
            self.shell.set_prompt_prefix("❌")

    def _get_prompt_prefix(self, loading: bool = False) -> str:
        env_prefix = f"{self.env.meta.emoji}({self.env.get_full_name()})"

        if loading:
            env_prefix = "⏳" + env_prefix

        return env_prefix

    def _set_context(self) -> None:
        for c in self.env.get_magic_functions()["context"]:
            context = c()
            self.shell.update_context(context)
        self.shell.set_prompt_prefix(self._get_prompt_prefix(loading=False))

    def _on_create(self) -> None:
        for h in self.env.get_magic_functions()["oncreate"]:
            h()

    def _on_destroy(self) -> None:
        for h in self.env.get_magic_functions()["ondestroy"]:
            h()

    def _on_load(self) -> None:
        for h in self.env.get_magic_functions()["onload"]:
            h()

    def _on_unload(self) -> None:
        for h in self.env.get_magic_functions()["onunload"]:
            h()

    def _on_precmd(self, command: str) -> str:
        for h in self.env.get_magic_functions()["precmd"]:
            if re.match(h.kwargs["cmd_regex"], command):
                ret = h(command=command)  # type: ignore
                if ret:
                    command = ret
        return command

    def _on_stdout(self, command: str, out: str) -> str:
        for h in self.env.get_magic_functions()["onstdout"]:
            if re.match(h.kwargs["cmd_regex"], command):
                ret = h(command=command, out=out)  # type: ignore
                if ret:
                    out = ret
        return out

    def _on_stderr(self, command: str, out: str) -> str:
        for h in self.env.get_magic_functions()["onstderr"]:
            if re.match(h.kwargs["cmd_regex"], command):
                ret = h(command=command, out=out)  # type: ignore
                if ret:
                    out = ret
        return out

    def _on_postcmd(self, command: str, stdout: List[str],
                    stderr: List[str]) -> None:
        for h in self.env.get_magic_functions()["postcmd"]:
            if re.match(h.kwargs["cmd_regex"], command):
                h(command=command, stdout=stdout,
                  stderr=stderr)  # type: ignore

    def _files_watchdog(self) -> None:
        for event in self.inotify.event_gen(yield_nones=False):
            if self.quit:
                return

            (_, type_names, path, filename) = event
            if "IN_CLOSE_WRITE" in type_names:
                logger.info(f'\nDetected changes in "{str(path)}".')
                logger.info("Reloading...")
                self.restart()
                print("\r" + self.shell.prompt, end="")

    def _start_files_watchdog(self) -> None:
        for d in self.env_dirs:
            comm_env_file = d / "env_comm.py"
            env_file = d / f"env_{self.se.stage}.py"
            self.inotify.add_watch(str(comm_env_file))
            self.inotify.add_watch(str(env_file))

        self.files_watchdog_thread = Thread(target=self._files_watchdog)
        self.files_watchdog_thread.start()

    def _stop_files_watchdog(self) -> None:
        self.quit = True
        env_comm = self.env_dirs[0] / "env_comm.py"
        # Rewrite the same content to trigger an inotify event and unblock the watchdog thread
        env_comm.write_text(env_comm.read_text())

    def _get_env_dirs(self) -> List[Path]:
        ret = []
        path = Path(".").absolute()
        while True:
            env_file = path / f"env_{self.se.stage}.py"
            if env_file.exists():
                ret.append(path)
            else:
                if path == Path("/"):
                    break
            path = path.parent

        return ret

    def _create_init_files(self) -> None:
        """
        Create __init__.py files if not exist.

        If exist save them to __init__.py.tmp to recover later.
        This step is needed because there might be some content in existing that might crash envo.
        """

        for d in self.env_dirs:
            init_file = d / "__init__.py"

            if init_file.exists():
                init_file_tmp = d / Path("__init__.py.tmp")
                init_file_tmp.touch()
                init_file_tmp.write_text(init_file.read_text())

            if not init_file.exists():
                init_file.touch()

            init_file.write_text("# __envo_delete__")

    def _delete_init_files(self) -> None:
        """
        Delete __init__.py files if crated otherwise recover.
        :return:
        """
        for d in self.env_dirs:
            init_file = d / Path("__init__.py")
            init_file_tmp = d / Path("__init__.py.tmp")

            if init_file.read_text() == "# __envo_delete__":
                init_file.unlink()

            if init_file_tmp.exists():
                init_file.touch()
                init_file.write_text(init_file_tmp.read_text())
                init_file_tmp.unlink()

    def create_env(self) -> Env:
        env_dir = self.env_dirs[0]
        package = env_dir.name
        env_name = f"env_{self.se.stage}"
        env_file = env_dir / f"{env_name}.py"

        module_name = f"{package}.{env_name}"

        # We have to lock this part in case there's other shells concurrently executing this code
        with ILock("envo_lock"):
            self._create_init_files()

            # unload modules
            for m in list(sys.modules.keys())[:]:
                if m.startswith("env_"):
                    sys.modules.pop(m)

            try:
                module = import_from_file(env_file)
                env: Env
                env = module.Env()
                return env
            except ImportError as exc:
                raise EnvoError(
                    f"""Couldn't import "{module_name}" ({exc}).""")
            finally:
                self._delete_init_files()

    def _create_from_templ(self,
                           templ_file: Path,
                           output_file: Path,
                           is_comm: bool = False) -> None:
        """
        Create env file from template.

        :param templ_file:
        :param output_file:
        :param is_comm:
        :return:
        """
        Environment(keep_trailing_newline=True)
        if output_file.exists():
            raise EnvoError(f"{str(output_file)} file already exists.")

        env_dir = Path(".").absolute()
        package_name = misc.dir_name_to_pkg_name(env_dir.name)
        class_name = misc.dir_name_to_class_name(package_name) + "Env"

        if misc.is_valid_module_name(env_dir.name):
            env_comm_import = f"from env_comm import {class_name}Comm"
        else:
            env_comm_import = (
                "from pathlib import Path\n"
                f"from envo.misc import import_from_file\n\n\n"
                f'{class_name}Comm = import_from_file(Path("env_comm.py")).{class_name}Comm'
            )

        context = {
            "class_name": class_name,
            "name": env_dir.name,
            "package_name": package_name,
            "stage": self.se.stage,
            "emoji": stage_emoji_mapping.get(self.se.stage, "🙂"),
            "selected_addons": self.se.addons,
            "env_comm_import": env_comm_import,
        }

        if not is_comm:
            context["stage"] = self.se.stage

        misc.render_py_file(templates_dir / templ_file,
                            output=output_file,
                            context=context)

    def init_files(self) -> None:
        env_comm_file = Path("env_comm.py")
        if not env_comm_file.exists():
            self._create_from_templ(Path("env_comm.py.templ"),
                                    env_comm_file,
                                    is_comm=True)

        env_file = Path(f"env_{self.se.stage}.py")
        self._create_from_templ(Path("env.py.templ"), env_file)
        logger.info(f"Created {self.se.stage} environment 🍰!")

    def handle_command(self, args: argparse.Namespace) -> None:
        if args.version:
            from envo.__version__ import __version__

            logger.info(__version__)
            return

        if args.init:
            self.init_files()
            return

        if not self.env_dirs:
            raise EnvoError("Couldn't find any env!\n"
                            'Forgot to run "envo --init" first?')
        sys.path.insert(0, str(self.env_dirs[0]))

        if args.save:
            self.create_env().dump_dot_env()
            return

        if args.command:
            self.spawn_shell("headless")
            try:
                self.shell.default(args.command)
            except SystemExit as e:
                sys.exit(e.code)
            else:
                sys.exit(self.shell.history[-1].rtn)

        if args.dry_run:
            content = "\n".join([
                f'export {k}="{v}"'
                for k, v in self.create_env().get_env_vars().items()
            ])
            print(content)
        else:
            self.spawn_shell(args.shell)
Example #33
        if not el[2] == 2:
            query.append([(
                el[0][0],
                len(query),
            ), el[1], el[2]])
except (IOError, UnicodeDecodeError) as e:
    print("File query.pkl not found or corrupt, creating new empty query")

chdir("/home/catomaior/Documents/Scuola/A.S. 2019-2020")

if isConnected():
    for el in query:
        el[2] = 1
        start_new_thread(el[1], el[0])

i = Inotify()
for dir in DIRS:
    i.add_watch(dir)

lastEvent = ()
operations = {"temp": "temp"}
for event in i.event_gen(yield_nones=False):
    (_, type_names, path, filename) = event

    if (filename.endswith(".pdf") or filename.endswith(".pptx")) and not (
            filename[0] == "." or (path, filename, type_names) == lastEvent):
        if filename not in operations:
            operations[filename] = []
        operations[filename].append(type_names[0])
        #print(operations[filename], filename, path)
Example #34
    def __init__(self, process_number, fields, input_directory, output_directory, user=None):
        self._pcap_to_csv_pool = PcapToCsvPool(process_number, fields, output_directory, user)
        self._inotify = Inotify()
        self._inotify.add_watch(input_directory.encode("utf8"))
        self._log = logging.getLogger("PcapToCsv")
Example #35
    def __init__(self, extra_files=None, callback=None):
        super(InotifyReloader, self).__init__()
        self.setDaemon(True)
        self._callback = callback
        self._dirs = set()
        self._watcher = Inotify()