Example #1
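A processor's process method: the incoming entry is wrapped with config_helper, a timestamp-based output file name is derived from the entry's data.time field, and the entry is handed to the underlying output processor, switching output files whenever the name changes.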
def process(self, process_entry):
    # Wrap the raw entry so its fields can be read via attribute access.
    __process_entry = config_helper(process_entry)
    op = self.calculate_output_name("{ts}.{ext}", __process_entry.data.time)
    if op != self.out_name:
        # The timestamped name rolled over: point the output processor at a new file.
        self.out_name = op
        self.output_processor.setOutputFileName(os.path.join(self.output_dir, op))
    self.output_processor.process(__process_entry)
Example #2
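A processor thread's run loop: it lists the cache, skips entries newer than the configured collision-avoidance delay, wraps each readable entry with config_helper, and either de-caches it after successful processing or moves it to the bad-data archive on failure.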
def run(self):
    while self.isRunning.running:
        order, c_entries = storage_cache.list_cache()
        if c_entries and self._processor.can_process():
            for entry in order:
                # Cache file names start with the epoch timestamp of the entry.
                parts = entry.split("-")
                cache_time = float(parts[0].split(".")[0])
                current_time = time.mktime(datetime.datetime.now().timetuple())
                # Only process entries older than the configured collision-avoidance delay.
                if abs(cache_time - current_time) > SerialGrabber_Settings.cache_collision_avoidance_delay:
                    entry_path = c_entries[entry]
                    if os.path.isfile(entry_path):
                        try:
                            data = {
                                "data": storage_cache.read_cache(entry_path),
                                "entry_path": entry_path,
                                "entry": entry
                            }

                            if self._processor.process(config_helper(data)):
                                self.counter.processed()
                                storage_cache.decache(entry_path)
                        except DebugTransformException as de:
                            self.logger.debug("Debug exception: %s" % de)
                        except BaseException as e:
                            self.logger.error("Failed to process data: %s, moving to bad data archive" % e)
                            self.logger.exception(e)
                            self.counter.error()
                            storage_cache.decache(entry_path, type="bad_data")
                    else:
                        self.logger.debug("File is too new. Leaving for next round.")
                if not self.isRunning.running:
                    self.logger.error("Stopped running during entry iteration, breaking.")
                    break
Example #3
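The start entry point: shared counter and running state are bundled through config_helper, MultiProcessParameterFactory components add their own parameters to that bundle, and the reader, processor, and commander threads are started under a Watchdog until the running flag is cleared.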
def start(logger, reader, processor, command):
    try:
        si = status(logger)
        isRunning = running(True)
        c = counter(si)

        # Shared state handed to every thread, wrapped for attribute access.
        params = config_helper({
            "counter": c,
            "running": isRunning
        })

        # Components that need extra shared parameters get a chance to add them.
        if issubclass(command.__class__, MultiProcessParameterFactory):
            command.populate_parameters(params)
        if issubclass(reader.__class__, MultiProcessParameterFactory):
            reader.populate_parameters(params)
        if issubclass(processor.__class__, MultiProcessParameterFactory):
            processor.populate_parameters(params)

        watchdog = Watchdog(isRunning)
        register_handler(isRunning, watchdog, reader, processor, command)
        if reader:
            watchdog.start_thread(reader, (isRunning, c, params), "Runner")
        if processor:
            watchdog.start_thread(ProcessorManager(processor), (isRunning, c, params), "Processor")
        if command and reader:
            watchdog.start_thread(command, (isRunning, c, params), "Commander")
        # Block until something clears the running flag.
        while isRunning.running:
            time.sleep(1)
    finally:
        storage_cache.close_cache()
Example #4
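A read_cache method: the cache file is parsed as JSON, binary payloads are base64-decoded and unpickled, entries missing both the timestamp and the payload are logged as corrupt and de-cached, and valid entries are returned wrapped in config_helper.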
def read_cache(self, cache_filename):
    with open(cache_filename, "rb") as cache_file:
        try:
            cache_entry = json.load(cache_file)
            # Binary payloads are stored base64-encoded and pickled.
            if constants.binary in cache_entry and cache_entry[constants.binary]:
                cache_entry[constants.payload] = pickle.loads(base64.b64decode(cache_entry[constants.payload]))
            if constants.timep not in cache_entry and constants.payload not in cache_entry:
                self.logger.error("Corrupted Cache Entry: %s de-caching." % cache_filename)
                self.decache(cache_filename)
                return None
            return config_helper(cache_entry)
        except ValueError:
            # The file did not contain valid JSON.
            self.logger.error("Corrupted Cache Entry: %s de-caching." % cache_filename)
            self.decache(cache_filename)
            return None
Example #5
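A variant of the same read_cache method in which binary payloads are only base64-decoded rather than unpickled (the old line is left commented out); the corruption handling is unchanged.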
def read_cache(self, cache_filename):
    with open(cache_filename, "rb") as cache_file:
        try:
            cache_entry = json.load(cache_file)
            if constants.binary in cache_entry and cache_entry[constants.binary]:
                # The payload is only base64-decoded here; it is no longer unpickled.
                # cache_entry[constants.payload] = pickle.loads(base64.b64decode(cache_entry[constants.payload]))
                cache_entry[constants.payload] = base64.b64decode(cache_entry[constants.payload])
            if constants.timep not in cache_entry and constants.payload not in cache_entry:
                self.logger.error("Corrupted Cache Entry: %s de-caching." % cache_filename)
                self.decache(cache_filename)
                return None
            return config_helper(cache_entry)
        except ValueError:
            self.logger.error("Corrupted Cache Entry: %s de-caching." % cache_filename)
            self.decache(cache_filename)
            return None
Example #6
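The item-access side of the object that config_helper returns: looked-up values are passed back through config_helper so that nested dicts remain wrapped.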
def __getitem__(self, key):
    # Re-wrap looked-up values so nested data stays attribute/item accessible.
    return config_helper(self.__dict__["data"][key])
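All of the examples above follow the same pattern: a plain dict (or a JSON-decoded cache entry) is passed through config_helper, and the result is then read with attribute access (process_entry.data.time) or item access (the __getitem__ excerpt in Example #6). As a minimal sketch only, not the actual SerialGrabber implementation, a wrapper with that behaviour could look like the following; the names _ConfigWrapper, _wrap, and _data are hypothetical:

# Illustrative sketch only -- not the real config_helper from SerialGrabber.
class _ConfigWrapper(object):
    def __init__(self, data):
        self._data = data

    def _wrap(self, value):
        # Nested dicts are wrapped again so chained access keeps working.
        return _ConfigWrapper(value) if isinstance(value, dict) else value

    def __getattr__(self, name):
        try:
            return self._wrap(self._data[name])
        except KeyError:
            raise AttributeError(name)

    def __getitem__(self, key):
        return self._wrap(self._data[key])


def config_helper(data):
    # Hypothetical: wrap dicts for attribute-style access, pass other values through.
    return _ConfigWrapper(data) if isinstance(data, dict) else data


entry = config_helper({"data": {"time": 1234567890, "payload": "..."}})
print(entry.data.time)        # 1234567890
print(entry["data"]["time"])  # 1234567890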