Beispiel #1
0
    def __init__(self, port, attrs=None):
        """Set up the request-handling thread.

        Binds a ROUTER socket on *port* for client requests and an
        in-process PULL socket for collecting replies, validates the
        configured origin pattern, and prepares the file deleter.

        Raises:
            ConfigError: if ``attrs["origin"]`` is not a valid pattern.
            KeyError: if 'origin' is missing and no 'listen' key is set.
        """
        Thread.__init__(self)

        self._loop = True
        # Public-facing socket for incoming client requests.
        self.out_socket = get_context().socket(ROUTER)
        self.out_socket.bind("tcp://*:" + str(port))
        self.port = port
        # Internal socket on which reply messages are gathered.
        self.in_socket = get_context().socket(PULL)
        self.in_socket.bind("inproc://replies" + str(port))

        # Poll both sockets with a single poller.
        self._poller = Poller()
        self._poller.register(self.out_socket, POLLIN)
        self._poller.register(self.in_socket, POLLIN)
        self._attrs = attrs
        try:
            # Checking the validity of the file pattern
            globify(attrs["origin"])
        except ValueError as err:
            raise ConfigError('Invalid file pattern: ' + str(err))
        except KeyError:
            # 'origin' may be omitted only when a 'listen' address is given.
            if 'listen' not in attrs:
                raise
        self._deleter = Deleter(attrs)

        try:
            self._station = self._attrs["station"]
        except (KeyError, TypeError):
            LOGGER.warning("Station is not defined in config file")
            self._station = "unknown"
        LOGGER.debug("Station is '%s'", self._station)
Beispiel #2
0
def _update_chains(chains, new_chains, manager, use_watchdog, publisher,
                   notifier_builder):
    """Apply *new_chains* on top of *chains*.

    Unchanged chains are left alone; changed ones are stopped and rebuilt.
    Returns a list of (glob pattern, callback, chain config) tuples for
    every (re)created chain that has an 'origin' pattern.
    """
    globs = []
    for name, config in new_chains.items():
        existed = name in chains
        if existed:
            # Nothing to do when the configuration is unchanged.
            if _chains_are_identical(chains, new_chains, name):
                continue
            _stop_chain(chains[name])

        if not _add_chain(chains, name, config, manager):
            continue

        callback = _create_notifier_and_get_function(notifier_builder,
                                                     chains[name],
                                                     use_watchdog, config,
                                                     publisher)

        if 'origin' in config:
            globs.append((globify(config["origin"]), callback, config))

        if existed:
            LOGGER.debug("Updated %s", name)
        else:
            LOGGER.debug("Added %s", name)

    return globs
Beispiel #3
0
def test_create_watchdog_notifier(process_notify):
    """Test creating a watchdog notifier."""
    import time
    from trollmoves.server import create_watchdog_notifier
    from trollsift import globify

    filename = "20200428_1000_foo.tif"
    name_template = "{start_time:%Y%m%d_%H%M}_{product}.tif"
    publisher = "publisher"
    with TemporaryDirectory() as tmpdir:
        pattern_path = os.path.join(tmpdir, name_template)
        file_path = os.path.join(tmpdir, filename)
        attrs = {"origin": pattern_path}
        observer, fun = create_watchdog_notifier(attrs, publisher)
        observer.start()

        # Create an empty file matching the watched pattern.
        with open(file_path, "w") as fid:
            fid.write('')

        # Wait for a while for the watchdog to register the event
        time.sleep(2.0)

        observer.stop()
        observer.join()

    fun.assert_called_with(file_path, publisher, globify(pattern_path), attrs)
Beispiel #4
0
    def push(self, message):
        """Reply to push request
        """
        pathname = urlparse(message.data["uri"]).path

        # Only serve files whose basename matches the configured origin.
        requested = os.path.basename(pathname)
        allowed = os.path.basename(globify(self._attrs["origin"]))
        if not fnmatch.fnmatch(requested, allowed):
            LOGGER.warning('Client trying to get invalid file: %s', pathname)
            return Message(message.subject,
                           "err",
                           data="{0:s} not reachable".format(pathname))
        try:
            move_it(message, self._attrs)
        except Exception as err:
            return Message(message.subject, "err", data=str(err))

        # Schedule the source file for deletion when compression is on or
        # deletion is explicitly enabled in the configuration.
        delete_wanted = self._attrs.get('delete', 'False').lower()
        if self._attrs.get('compression') or delete_wanted in ["1", "yes",
                                                               "true", "on"]:
            self._deleter.add(pathname)
        reply = Message(message.subject, "file", data=message.data.copy())
        reply.data['destination'] = clean_url(reply.data['destination'])
        return reply
Beispiel #5
0
def create_notifier(attrs):
    """Create a notifier from the specified configuration attributes *attrs*.

    Builds a pyinotify watch on the directory part of the globified
    ``attrs["origin"]`` pattern, and a *fun* callback that optionally
    decompresses, then moves/copies and deletes matching files.

    Note: this Python 2 snippet was modernized to the ``except X as e``
    syntax (valid in both Python 2.6+ and 3) and the bare ``except:``
    clauses narrowed to ``except Exception:`` so signals such as
    KeyboardInterrupt are no longer swallowed.
    """

    # File-appeared events we care about.
    tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
             | pyinotify.IN_CREATE)

    wm_ = pyinotify.WatchManager()

    opath, ofile = os.path.split(globify(attrs["origin"]))

    def fun(pathname):
        """Execute unpacking and copying/moving of *pathname*
        """
        efile = os.path.basename(pathname)
        if fnmatch.fnmatch(efile, ofile):
            LOGGER.info("We have a match: " + str(pathname))
            if attrs["compression"]:
                try:
                    # SECURITY: eval() on a configuration value — only safe
                    # when the config file is fully trusted.
                    unpack_fun = eval(attrs["compression"])
                    if "prog" in attrs:
                        new_path = unpack_fun(pathname,
                                              attrs["working_directory"],
                                              attrs["prog"])
                    else:
                        new_path = unpack_fun(pathname,
                                              attrs["working_directory"])
                except Exception:
                    LOGGER.exception("Could not decompress " + pathname)
                    return

            else:
                new_path = pathname
            try:
                move_it(new_path, attrs["destinations"],
                        attrs.get("copy_hook", None))
            except Exception:
                LOGGER.error("Something went wrong during copy of " + pathname)
            else:
                if attrs["delete"]:
                    try:
                        os.remove(pathname)
                        if attrs["delete_hook"]:
                            attrs["delete_hook"](pathname)
                        LOGGER.debug("Removed " + pathname)
                    except OSError as e__:
                        if e__.errno == 2:  # ENOENT: file already gone
                            LOGGER.debug("Already deleted: " + pathname)
                        else:
                            raise

            # delete temporary file
            if pathname != new_path:
                try:
                    os.remove(new_path)
                except OSError as e__:
                    if e__.errno == 2:  # ENOENT: nothing left to clean up
                        pass
                    else:
                        raise
Beispiel #6
0
    def ack(self, message):
        """Reply with ack to a publication

        Validates every 'uri' in the message data against the configured
        origin pattern, schedules matching files for deletion when
        compression or delete is enabled, and returns an 'ack' message.
        """
        for url in gen_dict_extract(message.data, 'uri'):
            uri = urlparse(url)
            pathname = uri.path

            if 'origin' in self._attrs and not fnmatch.fnmatch(
                    os.path.basename(pathname),
                    os.path.basename(globify(self._attrs["origin"]))):
                LOGGER.warning('Client trying to get invalid file: %s',
                               pathname)
                # Fixed typo ("reacheable") for consistency with the other
                # error replies in this module.
                return Message(message.subject,
                               "err",
                               data="{0:s} not reachable".format(pathname))

            if (self._attrs.get('compression') or self._attrs.get(
                    'delete', 'False').lower() in ["1", "yes", "true", "on"]):
                self._deleter.add(pathname)
        new_msg = Message(message.subject, "ack", data=message.data.copy())
        try:
            new_msg.data['destination'] = clean_url(
                new_msg.data['destination'])
        except KeyError:
            # Not all publications carry a destination; ack them as-is.
            pass
        return new_msg
Beispiel #7
0
 def _validate_file_pattern(self):
     """Check that the configured origin file pattern is valid.

     Raises:
         ConfigError: when ``self._attrs["origin"]`` is not a valid pattern.
         KeyError: when 'origin' is missing and no 'listen' key is set.
     """
     try:
         _ = globify(self._attrs["origin"])
     except ValueError as err:
         # Chain the original error so the root cause stays visible.
         raise ConfigError('Invalid file pattern: ' + str(err)) from err
     except KeyError:
         # 'origin' may be omitted only when a 'listen' address is given.
         if 'listen' not in self._attrs:
             raise
Beispiel #8
0
 def _validate_requested_file(self, pathname, message):
     """Return an error Message if *pathname* is not servable, else None."""
     # FIXME: check against file_cache
     if 'origin' not in self._attrs:
         return None
     requested = os.path.basename(pathname)
     allowed = os.path.basename(globify(self._attrs["origin"]))
     if fnmatch.fnmatch(requested, allowed):
         return None
     LOGGER.warning('Client trying to get invalid file: %s', pathname)
     return Message(message.subject,
                    "err",
                    data="{0:s} not reachable".format(pathname))
Beispiel #9
0
def create_watchdog_notifier(attrs, publisher):
    """Create a polling watchdog observer from the configuration *attrs*.

    Returns the observer and the process_notify callback it dispatches to.
    """
    pattern = globify(attrs["origin"])
    watched_dir = os.path.dirname(pattern)

    timeout = float(attrs.get("watchdog_timeout", 1.))
    LOGGER.debug("Watchdog timeout: %.1f", timeout)

    observer = PollingObserver(timeout=timeout)
    observer.schedule(
        WatchdogHandler(process_notify, publisher, pattern, attrs),
        watched_dir)

    return observer, process_notify
Beispiel #10
0
def test_process_notify(Message):
    """Test process_notify()."""
    from trollmoves.server import process_notify
    from trollmoves.server import file_cache, file_cache_lock
    from trollsift import globify
    import datetime as dt

    filename = "20200428_1000_foo.tif"
    name_template = "{start_time:%Y%m%d_%H%M}_{product}.tif"
    publisher = MagicMock()
    with TemporaryDirectory() as tmpdir:
        matching_pattern = os.path.join(tmpdir, name_template)
        pathname = os.path.join(tmpdir, filename)
        kwargs = {
            "origin": matching_pattern,
            "request_address": "localhost",
            "request_port": "9001",
            "topic": "/topic"
        }

        # A non-matching pattern must not publish anything.
        process_notify(pathname, publisher, "bar", kwargs)
        publisher.assert_not_called()

        with open(pathname, "w") as fid:
            fid.write('foo')

        process_notify(pathname, publisher, globify(matching_pattern), kwargs)

        # Check that the message was formed correctly
        expected_info = {
            'start_time': dt.datetime(2020, 4, 28, 10, 0),
            'product': 'foo',
            'uri': pathname,
            'uid': filename,
            'request_address': 'localhost:9001'
        }
        Message.assert_called_with(kwargs['topic'], 'file', expected_info)
        # Check that the publisher send was called
        publisher.send.assert_called_with(str(Message.return_value))
        # Check that the file cache was updated
        with file_cache_lock:
            assert "/topic/20200428_1000_foo.tif" in file_cache
            assert len(file_cache) == 1
Beispiel #11
0
def create_file_notifier(attrs, publisher):
    """Create a notifier from the specified configuration attributes *attrs*.

    Returns a (ThreadedNotifier, callback) pair: the notifier watches the
    directory of the globified ``attrs["origin"]`` pattern, and the
    callback publishes a 'file' message for every matching file.
    """

    # File-appeared events to watch for.
    tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
             | pyinotify.IN_CREATE)

    wm_ = pyinotify.WatchManager()

    pattern = globify(attrs["origin"])
    opath = os.path.dirname(pattern)

    def fun(orig_pathname):
        """Publish what we have."""
        if not fnmatch.fnmatch(orig_pathname, pattern):
            return
        else:
            LOGGER.debug('We have a match: %s', orig_pathname)

        pathname = unpack(orig_pathname, **attrs)

        # Optional static metadata from config, "k1=v1;k2=a,b" becomes
        # {'k1': 'v1', 'k2': ['a', 'b']}.
        info = attrs.get("info", {})
        if info:
            info = dict((elt.strip().split('=') for elt in info.split(";")))
            for infokey, infoval in info.items():
                if "," in infoval:
                    info[infokey] = infoval.split(",")

        # Metadata parsed from the filename overrides static values.
        info.update(parse(attrs["origin"], orig_pathname))
        info['uri'] = pathname
        info['uid'] = os.path.basename(pathname)
        info['request_address'] = attrs.get(
            "request_address", get_own_ip()) + ":" + attrs["request_port"]
        msg = Message(attrs["topic"], 'file', info)
        publisher.send(str(msg))
        # Record the published uid; presumably used to validate later
        # requests against the cache.
        with file_cache_lock:
            file_cache.appendleft(attrs["topic"] + '/' + info["uid"])
        LOGGER.debug("Message sent: " + str(msg))

    tnotifier = pyinotify.ThreadedNotifier(wm_, EventHandler(fun))

    wm_.add_watch(opath, tmask)

    return tnotifier, fun
Beispiel #12
0
    def push(self, message):
        """Reply to push request

        Moves every file referenced by a 'uri' in the message data to the
        requested destination and replies with a message of the matching
        type ('dataset', 'collection' or 'file').
        """
        # Determine the reply type up front: previously this was computed
        # inside the loop, so a message with no 'uri' entries left ``mtype``
        # unbound (NameError) and a metadata-less message only failed after
        # files had already been moved.
        if 'dataset' in message.data:
            mtype = 'dataset'
        elif 'collection' in message.data:
            mtype = 'collection'
        elif 'uid' in message.data:
            mtype = 'file'
        else:
            raise KeyError('No known metadata in message.')

        for the_dict in gen_dict_contains(message.data, 'uri'):
            uri = urlparse(the_dict['uri'])
            rel_path = the_dict.get('path', '')
            pathname = uri.path
            # FIXME: check against file_cache
            if 'origin' in self._attrs and not fnmatch.fnmatch(
                    os.path.basename(pathname),
                    os.path.basename(globify(self._attrs["origin"]))):
                LOGGER.warning('Client trying to get invalid file: %s',
                               pathname)
                return Message(message.subject,
                               "err",
                               data="{0:s} not reachable".format(pathname))
            try:
                move_it(pathname,
                        message.data['destination'],
                        self._attrs,
                        rel_path=rel_path)
            except Exception as err:
                return Message(message.subject, "err", data=str(err))
            else:
                # Schedule the source for deletion when compression is on
                # or deletion is enabled in the configuration.
                if (self._attrs.get('compression')
                        or self._attrs.get('delete', 'False').lower()
                        in ["1", "yes", "true", "on"]):
                    self._deleter.add(pathname)

        new_msg = Message(message.subject, mtype, data=message.data.copy())
        new_msg.data['destination'] = clean_url(new_msg.data['destination'])
        return new_msg
Beispiel #13
0
def create_inotify_notifier(attrs, publisher):
    """Create a notifier from the specified configuration attributes *attrs*."""
    # Include IN_DELETE so the handler also sees removed files.
    tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
             | pyinotify.IN_CREATE | pyinotify.IN_DELETE)

    wm_ = pyinotify.WatchManager()

    pattern = globify(attrs["origin"])
    opath = os.path.dirname(pattern)

    if 'origin_inotify_base_dir_skip_levels' in attrs:
        """If you need to inotify monitor for new directories within the origin
        this attribute tells the server how many levels to skip from the origin
        before staring to inorify monitor a directory

        Eg. origin=/tmp/{platform_name_dir}_{start_time_dir:%Y%m%d_%H%M}_{orbit_number_dir:05d}/
                   {sensor}_{platform_name}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.{data_processing_level:3s}

        and origin_inotify_base_dir_skip_levels=-2

        this means the inotify monitor will use opath=/tmp"""
        pattern_list = pattern.split('/')
        pattern_join = os.path.join(
            *pattern_list[:int(attrs['origin_inotify_base_dir_skip_levels'])])
        # Re-anchor at the filesystem root, which split('/') dropped.
        opath = os.path.join("/", pattern_join)
        LOGGER.debug("Using %s as base path for pyinotify add_watch.", opath)

    def process_notify_publish(pathname):
        # Bind the configured pattern and publisher into the event callback.
        pattern = globify(attrs["origin"])
        return process_notify(pathname, publisher, pattern, attrs)

    tnotifier = pyinotify.ThreadedNotifier(
        wm_, EventHandler(process_notify_publish,
                          watchManager=wm_,
                          tmask=tmask))

    wm_.add_watch(opath, tmask)

    # NOTE(review): returns the module-level process_notify rather than the
    # process_notify_publish wrapper actually wired to the notifier —
    # confirm callers expect the unbound callback here.
    return tnotifier, process_notify
    def __init__(self,
                 configfile,
                 shp_boarders,
                 shp_mask,
                 regional_filtermask=None):
        """Initialize the active fires post processor class.

        Args:
            configfile: Path to the configuration file read on init.
            shp_boarders: Shapefile with the national borders.
            shp_mask: Shapefile used as the national filter mask.
            regional_filtermask: Optional shapefile for regional filtering.
        """
        super().__init__()
        self.shp_boarders = shp_boarders
        self.shp_filtermask = shp_mask
        self.regional_filtermask = regional_filtermask
        self.configfile = configfile
        self.options = {}

        config = read_config(self.configfile)
        self._set_options_from_config(config)

        self.host = socket.gethostname()

        # Default to GMT when the config does not specify a timezone.
        self.timezone = self.options.get('timezone', 'GMT')

        # Messaging topics and file name patterns from the configuration.
        self.input_topic = self.options['subscribe_topics'][0]
        self.output_topic = self.options['publish_topic']
        self.infile_pattern = self.options.get('af_pattern_ibands')
        self.outfile_pattern_national = self.options.get(
            'geojson_file_pattern_national')
        self.outfile_pattern_regional = self.options.get(
            'geojson_file_pattern_regional')
        self.output_dir = self.options.get('output_dir', '/tmp')

        # Glob string used to locate the regional shapefiles on disk.
        frmt = self.options['regional_shapefiles_format']
        self.regional_shapefiles_globstr = globify(frmt)

        self.listener = None
        self.publisher = None
        self.loop = False
        self._setup_and_start_communication()
Beispiel #15
0
def reload_config(filename, disable_backlog=False):
    """Rebuild chains if needed (if the configuration changed) from *filename*.

    Relies on a module-level ``chains`` mapping. Changed chains are stopped
    and rebuilt; removed ones are torn down; finally old files matching the
    rebuilt patterns are touched so they are picked up again (backlog).
    """
    LOGGER.debug("New config file detected! %s", filename)

    new_chains = read_config(filename)

    old_glob = []

    # NOTE(review): config_changed is only set for *modified* existing
    # chains, not for added/removed ones — confirm the final log message is
    # meant to ignore additions and removals.
    config_changed = False
    for key, val in new_chains.items():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only entries.
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["notifier", "publisher"])
                        and ((key2 not in chains[key]) or
                             (chains[key][key2] != val2))):
                    identical = False
                    config_changed = True
                    break
            if identical:
                continue

            # Configuration changed: stop the old chain before rebuilding.
            chains[key]["notifier"].stop()
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()

        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            # No publish_port configured (or NoisyPublisher unavailable):
            # run this chain without a publisher.
            pass
        chains[key]["notifier"] = create_notifier(val)
        # create logger too!
        if "publisher" in chains[key]:
            pub = chains[key]["publisher"].start()
        chains[key]["notifier"].start()
        old_glob.append(globify(val["origin"]))

        if "publisher" in chains[key]:
            # NOTE(review): ``pub`` used as a default below is only bound if
            # some earlier iteration started a publisher — confirm a chain
            # cannot reach this point with ``pub`` still undefined.
            def copy_hook(pathname, dest, val=val, pub=pub):
                """Publish a 'file' message after a successful copy."""
                fname = os.path.basename(pathname)

                destination = urlunparse((dest.scheme, dest.hostname,
                                          os.path.join(dest.path,
                                                       fname), dest.params,
                                          dest.query, dest.fragment))
                # Optional static metadata "k1=v1;k2=a,b" from the config.
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for elt in info.split(";")))
                    for infokey, infoval in info.items():
                        if "," in infoval:
                            info[infokey] = infoval.split(",")
                else:
                    info = {}
                try:
                    info.update(
                        parse(os.path.basename(val["origin"]),
                              os.path.basename(pathname)))
                except ValueError:
                    # Fall back to parsing without the pattern's extension.
                    info.update(
                        parse(
                            os.path.basename(
                                os.path.splitext(val["origin"])[0]),
                            os.path.basename(pathname)))

                info['uri'] = destination
                info['uid'] = fname
                msg = Message(val["topic"], 'file', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))

            chains[key]["copy_hook"] = copy_hook

            def delete_hook(pathname, val=val, pub=pub):
                """Publish a 'del' message after a file was deleted."""
                fname = os.path.basename(pathname)
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for elt in info.split(";")))
                info['uri'] = pathname
                info['uid'] = fname
                msg = Message(val["topic"], 'del', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))

            chains[key]["delete_hook"] = delete_hook

        if not identical:
            LOGGER.debug("Updated %s", key)
        else:
            LOGGER.debug("Added %s", key)

    # Tear down chains that disappeared from the configuration.
    for key in (set(chains.keys()) - set(new_chains.keys())):
        chains[key]["notifier"].stop()
        del chains[key]
        LOGGER.debug("Removed %s", key)

    if config_changed:
        LOGGER.debug("Reloaded config from %s", filename)
    else:
        LOGGER.debug("No changes to reload in %s", filename)

    # Touch files already matching the rebuilt patterns so the notifiers
    # pick them up (backlog processing).
    if old_glob and not disable_backlog:
        fnames = []
        for pattern in old_glob:
            fnames += glob.glob(pattern)
        if fnames:
            time.sleep(3)
            LOGGER.debug("Touching old files")
            for fname in fnames:
                if os.path.exists(fname):
                    fp_ = open(fname, "ab")
                    fp_.close()
        old_glob = []
        LOGGER.info("Old files transferred")
Beispiel #16
0
def reload_config(filename,
                  chains,
                  notifier_builder=None,
                  manager=RequestManager,
                  publisher=None,
                  disable_backlog=False):
    """Rebuild chains if needed (if the configuration changed) from *filename*.

    Changed chains are stopped and rebuilt with a fresh request manager and
    notifier; removed chains are torn down. Unless *disable_backlog* is set,
    pre-existing files matching the rebuilt patterns are processed afterwards.
    """

    LOGGER.debug("New config file detected! " + filename)

    new_chains = read_config(filename)

    old_glob = []

    for key, val in new_chains.items():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only entries.
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["notifier", "publisher"])
                        and ((key2 not in chains[key]) or
                             (chains[key][key2] != val2))):
                    identical = False
                    break
            if identical:
                continue

            # Configuration changed: stop the old chain before rebuilding.
            chains[key]["notifier"].stop()
            if "request_manager" in chains[key]:
                chains[key]["request_manager"].stop()
                LOGGER.debug('Stopped reqman')

        chains[key] = val.copy()
        try:
            chains[key]["request_manager"] = manager(int(val["request_port"]),
                                                     val)
            LOGGER.debug("Created request manager on port %s",
                         val["request_port"])
        except (KeyError, NameError):
            LOGGER.exception('In reading config')
        except ConfigError as err:
            # Invalid chain: drop it and continue with the others.
            LOGGER.error('Invalid config parameters in %s: %s', key, str(err))
            LOGGER.warning('Remove and skip %s', key)
            del chains[key]
            continue

        # NOTE(review): once chosen for the first chain, notifier_builder is
        # reused for every subsequent chain — confirm configs mixing
        # 'origin' and 'listen' chains are intended to share one builder.
        if notifier_builder is None:
            if 'origin' in val:
                notifier_builder = create_file_notifier
            elif 'listen' in val:
                notifier_builder = create_posttroll_notifier

        chains[key]["notifier"], fun = notifier_builder(val, publisher)
        chains[key]["request_manager"].start()
        chains[key]["notifier"].start()
        if 'origin' in val:
            old_glob.append((globify(val["origin"]), fun))

        if not identical:
            LOGGER.debug("Updated " + key)
        else:
            LOGGER.debug("Added " + key)

    # Tear down chains that disappeared from the configuration.
    for key in (set(chains.keys()) - set(new_chains.keys())):
        chains[key]["notifier"].stop()
        del chains[key]
        LOGGER.debug("Removed " + key)

    LOGGER.debug("Reloaded config from " + filename)
    # Process files that already match the rebuilt patterns (backlog).
    if old_glob and not disable_backlog:
        time.sleep(3)
        for pattern, fun in old_glob:
            process_old_files(pattern, fun)

    LOGGER.debug("done reloading config")
Beispiel #17
0
def reload_config(filename):
    """Rebuild chains if needed (if the configuration changed) from *filename*.

    Python 2 variant (uses dict.iteritems); relies on module-level
    ``chains`` and ``cmd_args``.
    """
    # Ignore notifications for anything but the configured config file.
    if os.path.abspath(filename) != os.path.abspath(cmd_args.config_file):
        return

    LOGGER.debug("New config file detected! " + filename)

    new_chains = read_config(filename)

    old_glob = []

    for key, val in new_chains.iteritems():
        identical = True
        if key in chains:
            # Compare everything except the runtime-only entries.
            for key2, val2 in new_chains[key].iteritems():
                if ((key2 not in ["notifier", "publisher"]) and
                    ((key2 not in chains[key]) or
                     (chains[key][key2] != val2))):
                    identical = False
                    break
            if identical:
                continue

            # Configuration changed: stop the old chain before rebuilding.
            chains[key]["notifier"].stop()
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()

        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            # No publish_port configured: run this chain without a publisher.
            pass
        chains[key]["notifier"] = create_notifier(val)
        # create logger too!
        if "publisher" in chains[key]:
            pub = chains[key]["publisher"].start()
        chains[key]["notifier"].start()
        old_glob.append(globify(val["origin"]))

        if "publisher" in chains[key]:
            # NOTE(review): ``pub`` used as a default below is only bound if
            # some earlier iteration started a publisher — confirm a chain
            # cannot reach this point with ``pub`` still undefined.
            def copy_hook(pathname, dest, val=val, pub=pub):
                """Publish a 'file' message after a successful copy."""
                fname = os.path.basename(pathname)

                destination = urlunparse((dest.scheme,
                                          dest.hostname,
                                          os.path.join(dest.path, fname),
                                          dest.params,
                                          dest.query,
                                          dest.fragment))
                # Optional static metadata "k1=v1;k2=a,b" from the config.
                info = val.get("info", "")
                if info:
                    info = dict((elt.strip().split('=')
                                 for elt in info.split(";")))
                    for infokey, infoval in info.items():
                        if "," in infoval:
                            info[infokey] = infoval.split(",")
                else:
                    info = {}
                info.update(parse(val["origin"], pathname))
                info['uri'] = destination
                info['uid'] = fname
                msg = Message(val["topic"], 'file', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: " + str(msg))

            chains[key]["copy_hook"] = copy_hook

            def delete_hook(pathname, val=val, pub=pub):
                """Publish a 'del' message after a file was deleted."""
                fname = os.path.basename(pathname)
                info = val.get("info", "")
                if info:
                    info = dict((elt.strip().split('=')
                                 for elt in info.split(";")))
                info['uri'] = pathname
                info['uid'] = fname
                msg = Message(val["topic"], 'del', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: " + str(msg))

            chains[key]["delete_hook"] = delete_hook

        if not identical:
            LOGGER.debug("Updated " + key)
        else:
            LOGGER.debug("Added " + key)

    # Tear down chains that disappeared from the configuration.
    for key in (set(chains.keys()) - set(new_chains.keys())):
        chains[key]["notifier"].stop()
        del chains[key]
        LOGGER.debug("Removed " + key)

    LOGGER.debug("Reloaded config from " + filename)
    # Touch already-existing files matching the patterns so the notifiers
    # pick them up (backlog processing).
    if old_glob:
        fnames = []
        for pattern in old_glob:
            fnames += glob.glob(pattern)
        if fnames:
            time.sleep(3)
            LOGGER.debug("Touching old files")
            for fname in fnames:
                if os.path.exists(fname):
                    fp_ = open(fname, "ab")
                    fp_.close()
        old_glob = []
    LOGGER.debug("done reloading config")
Beispiel #18
0
def create_notifier(attrs):
    """Create a notifier from the specified configuration attributes *attrs*.

    Builds a pyinotify watch on the directory part of the globified
    ``attrs["origin"]`` pattern, and a *fun* callback that optionally
    decompresses, then moves/copies and deletes matching files.

    Note: this Python 2 snippet was modernized to the ``except X as e``
    syntax (valid in both Python 2.6+ and 3) and the bare ``except:``
    narrowed to ``except Exception:`` so signals such as
    KeyboardInterrupt are no longer swallowed.
    """

    # File-appeared events we care about.
    tmask = (pyinotify.IN_CLOSE_WRITE |
             pyinotify.IN_MOVED_TO |
             pyinotify.IN_CREATE)

    wm_ = pyinotify.WatchManager()

    opath, ofile = os.path.split(globify(attrs["origin"]))

    def fun(pathname):
        """Execute unpacking and copying/moving of *pathname*
        """
        efile = os.path.basename(pathname)
        if fnmatch.fnmatch(efile, ofile):
            LOGGER.info("We have a match: " + str(pathname))
            if attrs["compression"]:
                try:
                    # SECURITY: eval() on a configuration value — only safe
                    # when the config file is fully trusted.
                    unpack_fun = eval(attrs["compression"])
                    if "prog" in attrs:
                        new_path = unpack_fun(pathname,
                                              attrs["working_directory"],
                                              attrs["prog"])
                    else:
                        new_path = unpack_fun(pathname,
                                              attrs["working_directory"])
                except Exception:
                    LOGGER.exception("Could not decompress " + pathname)
                    return

            else:
                new_path = pathname
            try:
                move_it(new_path, attrs["destinations"],
                        attrs.get("copy_hook", None))
            except Exception:
                LOGGER.error("Something went wrong during copy of "
                             + pathname)
            else:
                if attrs["delete"]:
                    try:
                        os.remove(pathname)
                        if attrs["delete_hook"]:
                            attrs["delete_hook"](pathname)
                        LOGGER.debug("Removed " + pathname)
                    except OSError as e__:
                        if e__.errno == 2:  # ENOENT: file already gone
                            LOGGER.debug("Already deleted: " + pathname)
                        else:
                            raise

            # delete temporary file
            if pathname != new_path:
                try:
                    os.remove(new_path)
                except OSError as e__:
                    if e__.errno == 2:  # ENOENT: nothing left to clean up
                        pass
                    else:
                        raise
Beispiel #19
0
 def process_notify_publish(pathname):
     """Forward *pathname* to process_notify with the configured pattern."""
     return process_notify(pathname, publisher, globify(attrs["origin"]),
                           attrs)
Beispiel #20
0
def create_file_notifier(attrs, publisher):
    """Create a notifier from the specified configuration attributes *attrs*.

    Returns a (ThreadedNotifier, callback) pair: the notifier watches the
    directory of the globified ``attrs["origin"]`` pattern, and the
    callback publishes a 'file' message for every matching non-empty file.
    """

    # Watch file-appeared events plus deletions.
    tmask = (pyinotify.IN_CLOSE_WRITE |
             pyinotify.IN_MOVED_TO |
             pyinotify.IN_CREATE |
             pyinotify.IN_DELETE)

    wm_ = pyinotify.WatchManager()

    pattern = globify(attrs["origin"])
    opath = os.path.dirname(pattern)

    if 'origin_inotify_base_dir_skip_levels' in attrs:
        """If you need to inotify monitor for new directories within the origin
        this attribute tells the server how many levels to skip from the origin
        before staring to inorify monitor a directory

        Eg. origin=/tmp/{platform_name_dir}_{start_time_dir:%Y%m%d_%H%M}_{orbit_number_dir:05d}/
                   {sensor}_{platform_name}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.{data_processing_level:3s}

        and origin_inotify_base_dir_skip_levels=-2

        this means the inotify monitor will use opath=/tmp"""
        pattern_list = pattern.split('/')
        pattern_join = os.path.join(*pattern_list[:int(attrs['origin_inotify_base_dir_skip_levels'])])
        # Re-anchor at the filesystem root, which split('/') dropped.
        opath = os.path.join("/", pattern_join)
        LOGGER.debug("Using {} as base path for pyinotify add_watch.".format(opath))

    def fun(orig_pathname):
        """Publish what we have."""
        if not fnmatch.fnmatch(orig_pathname, pattern):
            return
        elif (os.stat(orig_pathname).st_size == 0):
            # Want to avoid files with size 0.
            return
        else:
            LOGGER.debug('We have a match: %s', orig_pathname)

        pathname = unpack(orig_pathname, **attrs)

        # Optional static metadata from config, "k1=v1;k2=a,b" becomes
        # {'k1': 'v1', 'k2': ['a', 'b']}.
        info = attrs.get("info", {})
        if info:
            info = dict((elt.strip().split('=') for elt in info.split(";")))
            for infokey, infoval in info.items():
                if "," in infoval:
                    info[infokey] = infoval.split(",")

        # Metadata parsed from the filename overrides static values.
        info.update(parse(attrs["origin"], orig_pathname))
        info['uri'] = pathname
        info['uid'] = os.path.basename(pathname)
        info['request_address'] = attrs.get(
            "request_address", get_own_ip()) + ":" + attrs["request_port"]
        msg = Message(attrs["topic"], 'file', info)
        publisher.send(str(msg))
        # Record the published uid; presumably used to validate later
        # requests against the cache.
        with file_cache_lock:
            file_cache.appendleft(attrs["topic"] + '/' + info["uid"])
        LOGGER.debug("Message sent: %s", str(msg))

    tnotifier = pyinotify.ThreadedNotifier(wm_, EventHandler(fun, watchManager=wm_, tmask=tmask))

    wm_.add_watch(opath, tmask)

    return tnotifier, fun