Example #1
0
def parse_past_logs(netlog_path,
                    netlog_prefix,
                    callback,
                    args=None,
                    kwargs=None,
                    start_time=None):
    """Replay historical netLog files through *callback*.

    Finds every netLog file under *netlog_path* whose name starts with
    *netlog_prefix*, parses each line (trying the post-2.1 format first,
    then the pre-2.1 format), and posts every successfully parsed line to
    an EDEventQueue wired to *callback*.

    :param netlog_path: directory containing the netLog files.
    :param netlog_prefix: filename prefix used to select log files.
    :param callback: listener invoked for each parsed line.
    :param args: optional positional arguments forwarded to *callback*.
    :param kwargs: optional keyword arguments forwarded to *callback*.
    :param start_time: only files dated on/after this datetime are parsed;
        defaults to the date of the oldest log file found.
    """
    # Defaults are None (not mutable literals) to avoid the shared
    # mutable-default-argument pitfall; normalize here.
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    eq = EDEventQueue()
    eq.add_listener(callback, *args, **kwargs)

    # Compile once; both formats are tried on every line below.
    regex_pre21 = re.compile(REGEXP_PRE21)
    regex_post21 = re.compile(REGEXP_POST21)

    loglist = _get_log_files(netlog_path, netlog_prefix)

    if loglist:
        if start_time is None:
            # Default to the oldest file's date so everything is replayed.
            file_date, _ = loglist[0]
            start_time = file_date

        for file_date, filename in loglist:
            file_date = file_date.date()

            # Skip files older than the requested start date.
            if file_date >= start_time.date():
                # 'with' guarantees the file is closed even if parsing raises.
                with open(filename, "r") as logfile:
                    prev_time = None
                    for line in logfile:
                        # Prefer the newer (post-2.1) format; fall back to
                        # the pre-2.1 format when it does not match.
                        match = (regex_post21.search(line)
                                 or regex_pre21.search(line))
                        if match:
                            file_date, prev_time, parsed_line = _handle_netlog_regex_match(
                                match, file_date, prev_time, start_time)

                            if parsed_line:
                                eq.post(parsed_line)

    eq.flush()
Example #2
0
def parse_past_journals(journal_path,
                        journal_prefix,
                        callback,
                        args=None,
                        kwargs=None,
                        start_time=None):
    """Replay historical Journal files through *callback*.

    Finds every Journal file under *journal_path* whose name starts with
    *journal_prefix*, parses each JSON line, and posts every line accepted
    by JournalLineFactory to an EDEventQueue wired to *callback*.

    :param journal_path: directory containing the Journal files.
    :param journal_prefix: filename prefix used to select journal files.
    :param callback: listener invoked for each parsed line.
    :param args: optional positional arguments forwarded to *callback*.
    :param kwargs: optional keyword arguments forwarded to *callback*.
    :param start_time: only entries on/after this local datetime are sent;
        defaults to the most recent game-start ('LoadGame') in the newest
        file.
    """
    # Defaults are None (not mutable literals) to avoid the shared
    # mutable-default-argument pitfall; normalize here.
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    eq = EDEventQueue()
    eq.add_listener(callback, *args, **kwargs)

    loglist = _get_log_files(journal_path, journal_prefix)

    if loglist:
        if start_time is None:
            # No explicit time given: send everything in the most recent
            # file since its last 'LoadGame' (game-start) entry.
            file_date, last_log_name = loglist[-1]
            start_time = file_date

            with open(last_log_name, "r") as logfile:
                for line in logfile:
                    try:
                        line_json = json.loads(line)
                    except ValueError:
                        # Live-written journals can end in a blank or
                        # partially written line; skip it.
                        continue

                    if 'timestamp' in line_json and 'event' in line_json:
                        try:
                            if (line_json['event'] == 'LoadGame'):
                                # Keep the *last* LoadGame seen; timestamps
                                # are UTC ("...Z") and converted to local.
                                start_time = datetime.datetime.strptime(
                                    line_json['timestamp'], "%Y-%m-%dT%H:%M:%SZ")
                                start_time = datetime_utc_to_local(start_time)

                        except ValueError:
                            # Malformed timestamp: ignore this entry.
                            pass

        if start_time is None:
            return

        # "Cargo" and "Loadout" are written shortly *before* "LoadGame", so
        # accept them within a small window before the start time.
        offset_start_time = start_time - datetime.timedelta(seconds=3)

        for file_date, filename in loglist:
            file_date = file_date.date()

            # Skip files older than the requested start date.
            if file_date >= start_time.date():
                with open(filename, "r") as logfile:
                    for line in logfile:
                        try:
                            line_json = json.loads(line)
                        except ValueError:
                            # Skip blank/partial lines (see above).
                            continue

                        if 'timestamp' in line_json and 'event' in line_json:
                            try:
                                line_time = datetime.datetime.strptime(
                                    line_json['timestamp'], "%Y-%m-%dT%H:%M:%SZ")
                                line_time = datetime_utc_to_local(line_time)

                                parsed_line = JournalLineFactory.get_line(
                                    line_time, line_json)

                                # Always forward 'Fileheader' as a reset
                                # marker; forward everything at/after the
                                # start time; give 'Cargo'/'Loadout' the
                                # pre-LoadGame leeway computed above.
                                if parsed_line and (
                                        line_json['event'] == 'Fileheader'
                                        or line_time >= start_time
                                        or (line_json['event'] in ('Cargo', 'Loadout')
                                            and line_time >= offset_start_time)):
                                    eq.post(parsed_line)
                            except ValueError:
                                # Malformed timestamp: ignore this entry.
                                pass

    eq.flush()
Example #3
0
class EDNetlogMonitor(RegexMatchingEventHandler):
    """Tail netLog files in a directory and publish parsed lines.

    A watchdog-style observer notices newly created files matching
    ``<prefix>.<digits>.<digits>.log`` while a polling thread tails the
    current file, feeding each line through ``_handle_netlog_regex_match``
    and posting results to an internal ``EDEventQueue``.
    """

    def __init__(self, logfile_prefix="netLog"):
        # Positional base-class args look like (regexes, ignore_regexes,
        # ignore_directories, case_sensitive) -- presumably watchdog's
        # RegexMatchingEventHandler; TODO confirm against its version.
        super(EDNetlogMonitor,
              self).__init__([r".*%s\.\d+\.\d+\.log" % logfile_prefix], [],
                             True, False)

        self._log = logging.getLogger("com.fussyware.edproxy")

        # Python issue7980 bug workaround: prime strptime once up front
        # because its first call is not thread-safe.
        datetime.datetime.strptime('2012-01-01', '%Y-%m-%d')

        self._lock = threading.Lock()          # guards start/stop + parse state
        self._stop_event = threading.Event()   # wakes the polling thread early
        self._running = False
        self._prefix = logfile_prefix
        self._event_queue = EDEventQueue()
        self._observer = None

        self._logfilename = None  # path of the file currently being tailed
        self._logfile = None      # handle open only while parsing

        # Pre-compile both netlog formats (pre- and post-2.1).
        self._regex_pre21 = re.compile(REGEXP_PRE21)
        self._regex_post21 = re.compile(REGEXP_POST21)

        self._regex_journal = re.compile(REGEXP_JOURNAL)

    def start(self, netlog_path):
        """Start monitoring *netlog_path*; no-op when already running.

        Raises ValueError if the path is empty or None.
        """
        with self._lock:
            if not self._running:
                if not netlog_path or len(netlog_path) == 0:
                    raise ValueError("Invalid netlog path specified.")

                self._running = True

                loglist = _get_log_files(netlog_path, self._prefix)
                if loglist:
                    # Tail the newest file from its end so only lines
                    # written after start() are reported.
                    file_date, self._logfilename = loglist[-1]
                    self._date_created = file_date.date()

                    self._logfile = open(self._logfilename, "r")
                    self._logfile.seek(0, os.SEEK_END)
                    self._where = self._logfile.tell()
                    self._logfile.close()
                else:
                    # NOTE(review): with no existing file, _logfilename
                    # stays None until on_created fires, but the polling
                    # thread below calls os.stat(self._logfilename)
                    # unconditionally -- looks like a latent crash; verify.
                    self._logfilename = None
                    self._logfile = None
                    self._date_created = None
                    self._where = 0

                self._previous_time = None

                self._log.debug("Parsing [%s] [%s]" %
                                (self._logfilename, str(self._date_created)))

                # Watch the directory (non-recursively) for new files...
                self._observer = observers.Observer()
                self._observer.schedule(self, netlog_path, False)
                self._observer.start()

                # ...and poll the current file for appended data.
                self._thread = threading.Thread(
                    target=self.__file_modified_thread)
                self._thread.daemon = True
                self._thread.start()

    def stop(self):
        """Stop the observer and signal the polling thread to exit."""
        with self._lock:
            if self._running:
                self._running = False
                self._observer.stop()
                self._observer.join()
                self._observer = None

                # Wake the polling thread out of its wait() immediately.
                self._stop_event.set()

                if self._logfilename:
                    self._logfilename = None

    def is_running(self):
        """Return True while the monitor is started (thread-safe)."""
        with self._lock:
            return self._running

    def add_listener(self, callback, *args, **kwargs):
        """Register *callback* to receive every parsed netlog line."""
        self._event_queue.add_listener(callback, *args, **kwargs)

    def set_netlog_prefix(self, value):
        # NOTE(review): only takes effect on the next start(); the matching
        # regex passed to the base class is not rebuilt here.
        self._prefix = value


#         self._regexes = [ re.compile(r".*%s\.\d+\.\d+\.log" % self._prefix, re.I) ]

    def get_netlog_prefix(self):
        """Return the current netLog filename prefix."""
        return self._prefix

    def on_created(self, event):
        """Observer callback: switch tailing to the newly created file."""
        with self._lock:
            self._log.debug("New netLog file created: [%s]" % event.src_path)
            self._logfilename = event.src_path

            # Reset parse state and read the new file from the beginning.
            self._date_created = self.__get_logfile_date(event.src_path)
            self._previous_time = None
            self._where = 0

            self.__parse_log()

    def __file_modified_thread(self):
        # Poll loop: parse whatever has been appended, then sleep (in 1s
        # slices, interruptible via _stop_event) until the file size changes.
        while self.is_running():
            with self._lock:
                self.__parse_log()

                size = os.stat(self._logfilename)

            modified = False
            while self.is_running() and not modified:
                if size.st_size == os.stat(self._logfilename).st_size:
                    self._stop_event.wait(1.0)
                    self._stop_event.clear()
                else:
                    modified = True

    def __get_logfile_date(self, path):
        # The second dot-separated filename field is a timestamp; older
        # files use minute precision, newer ones include seconds.
        try:
            _, filename = os.path.split(path)

            date = datetime.datetime.strptime(
                filename.split(".")[1], "%y%m%d%H%M")
        except ValueError:
            date = datetime.datetime.strptime(
                filename.split(".")[1], "%y%m%d%H%M%S")

        return date.date()

    def __parse_log(self):
        """Read from the last known offset, posting every parsed line."""
        try:
            self._logfile = open(self._logfilename, "r")
            self._logfile.seek(self._where)

            line = self._logfile.readline()

            while self._running and line:
                # Try the post-2.1 format first, then fall back to pre-2.1.
                match = self._regex_post21.search(line)
                if match:
                    self._date_created, self._previous_time, parsed_line = _handle_netlog_regex_match(
                        match, self._date_created, self._previous_time)

                    if parsed_line:
                        self._event_queue.post(parsed_line)
                else:
                    match = self._regex_pre21.search(line)
                    if match:
                        self._date_created, self._previous_time, parsed_line = _handle_netlog_regex_match(
                            match, self._date_created, self._previous_time)

                        if parsed_line:
                            self._event_queue.post(parsed_line)

                # Remember the offset so the next poll resumes here.
                self._where = self._logfile.tell()
                line = self._logfile.readline()

            self._logfile.close()
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        # and leaks the handle opened above if readline raises -- consider
        # narrowing and using try/finally.
        except:
            self._log.exception("Failed reading from the logfile.")
Example #4
0
class EDJournalMonitor(RegexMatchingEventHandler):
    """Tail Journal files in a directory and publish parsed entries.

    Same structure as EDNetlogMonitor, but each line is a JSON object that
    is handed to ``JournalLineFactory`` instead of being regex-matched.
    """

    def __init__(self, logfile_prefix="Journal"):
        # Positional base-class args look like (regexes, ignore_regexes,
        # ignore_directories, case_sensitive) -- presumably watchdog's
        # RegexMatchingEventHandler; TODO confirm against its version.
        super(EDJournalMonitor,
              self).__init__([r".*%s\.\d+\.\d+\.log" % logfile_prefix], [],
                             True, False)

        self._log = logging.getLogger("com.fussyware.edproxy")

        # Python issue7980 bug workaround: prime strptime once up front
        # because its first call is not thread-safe.
        datetime.datetime.strptime('2012-01-01', '%Y-%m-%d')

        self._lock = threading.Lock()          # guards start/stop + parse state
        self._stop_event = threading.Event()   # wakes the polling thread early
        self._running = False
        self._prefix = logfile_prefix
        self._event_queue = EDEventQueue()
        self._observer = None

        self._logfilename = None  # path of the file currently being tailed
        self._logfile = None      # handle open only while parsing

    def start(self, journal_path):
        """Start monitoring *journal_path*; no-op when already running.

        Raises ValueError if the path is empty or None.
        """
        with self._lock:
            if not self._running:
                if not journal_path or len(journal_path) == 0:
                    raise ValueError("Invalid journal path specified.")

                self._running = True

                loglist = _get_log_files(journal_path, self._prefix)
                if loglist:
                    # Tail the newest file from its end so only lines
                    # written after start() are reported.
                    file_date, self._logfilename = loglist[-1]
                    self._date_created = file_date.date()

                    self._logfile = open(self._logfilename, "r")
                    self._logfile.seek(0, os.SEEK_END)
                    self._where = self._logfile.tell()
                    self._logfile.close()
                else:
                    # NOTE(review): with no existing file, _logfilename
                    # stays None until on_created fires, but the polling
                    # thread below calls os.stat(self._logfilename)
                    # unconditionally -- looks like a latent crash; verify.
                    self._logfilename = None
                    self._logfile = None
                    self._date_created = None
                    self._where = 0

                self._previous_time = None

                self._log.debug("Parsing [%s] [%s]" %
                                (self._logfilename, str(self._date_created)))

                # Watch the directory (non-recursively) for new files...
                self._observer = observers.Observer()
                self._observer.schedule(self, journal_path, False)
                self._observer.start()

                # ...and poll the current file for appended data.
                self._thread = threading.Thread(
                    target=self.__file_modified_thread)
                self._thread.daemon = True
                self._thread.start()

    def stop(self):
        """Stop the observer and signal the polling thread to exit."""
        with self._lock:
            if self._running:
                self._running = False
                self._observer.stop()
                self._observer.join()
                self._observer = None

                # Wake the polling thread out of its wait() immediately.
                self._stop_event.set()

                if self._logfilename:
                    self._logfilename = None

    def is_running(self):
        """Return True while the monitor is started (thread-safe)."""
        with self._lock:
            return self._running

    def add_listener(self, callback, *args, **kwargs):
        """Register *callback* to receive every parsed journal entry."""
        self._event_queue.add_listener(callback, *args, **kwargs)

    def set_journal_prefix(self, value):
        # NOTE(review): only takes effect on the next start(); the matching
        # regex passed to the base class is not rebuilt here.
        self._prefix = value

    def get_journal_prefix(self):
        """Return the current Journal filename prefix."""
        return self._prefix

    def on_created(self, event):
        """Observer callback: switch tailing to the newly created file."""
        with self._lock:
            self._log.debug("New journal file created: [%s]" % event.src_path)
            self._logfilename = event.src_path

            # Reset parse state and read the new file from the beginning.
            self._date_created = self.__get_logfile_date(event.src_path)
            self._previous_time = None
            self._where = 0

            self.__parse_log()

    def __file_modified_thread(self):
        # Poll loop: parse whatever has been appended, then sleep (in 1s
        # slices, interruptible via _stop_event) until the file size changes.
        while self.is_running():
            with self._lock:
                self.__parse_log()

                size = os.stat(self._logfilename)

            modified = False
            while self.is_running() and not modified:
                if size.st_size == os.stat(self._logfilename).st_size:
                    self._stop_event.wait(1.0)
                    self._stop_event.clear()
                else:
                    modified = True

    def __get_logfile_date(self, path):
        # The second dot-separated filename field is a timestamp; older
        # files use minute precision, newer ones include seconds.
        try:
            _, filename = os.path.split(path)

            date = datetime.datetime.strptime(
                filename.split(".")[1], "%y%m%d%H%M")
        except ValueError:
            date = datetime.datetime.strptime(
                filename.split(".")[1], "%y%m%d%H%M%S")

        return date.date()

    def __parse_log(self):
        """Read from the last known offset, posting every parsed entry."""
        try:
            self._logfile = open(self._logfilename, "r")
            self._logfile.seek(self._where)

            line = self._logfile.readline()

            while self._running and line:
                # NOTE(review): json.loads raises on a blank or partially
                # written line; the outer bare except then aborts the whole
                # parse pass -- verify this is acceptable for live files.
                line_json = json.loads(line)

                if 'timestamp' in line_json and 'event' in line_json:
                    try:
                        # Timestamps are UTC ("...Z"); convert to local.
                        line_time = datetime.datetime.strptime(
                            line_json['timestamp'], "%Y-%m-%dT%H:%M:%SZ")
                        line_time = datetime_utc_to_local(line_time)
                        # NOTE(review): local, unused -- probably meant
                        # self._previous_time.
                        _previous_time = line_time
                        parsed_line = JournalLineFactory.get_line(
                            line_time, line_json)

                        if parsed_line:
                            self._event_queue.post(parsed_line)
                    except ValueError:
                        # Malformed timestamp: skip this entry.
                        pass

                # Remember the offset so the next poll resumes here.
                self._where = self._logfile.tell()
                line = self._logfile.readline()

            self._logfile.close()
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        # and leaks the handle opened above on error -- consider narrowing.
        except:
            self._log.exception("Failed reading from the logfile.")
Example #5
0
class EDPictureMonitor(PatternMatchingEventHandler):
    """Watch a directory for new *.bmp screenshots and serve them over HTTP.

    Each newly created .bmp may be renamed/converted (via PIL) and its URL
    is then posted to listeners through an ``EDEventQueue``. Note: this
    code is Python 2 specific (``str.translate(None, ...)``,
    ``urllib.quote_plus``).
    """

    def __init__(self, path=''):
        PatternMatchingEventHandler.__init__(self,
                                             patterns=["*.bmp"],
                                             ignore_directories=True)

        self._path = path

        # Register the watched directory with the module-level HTTP root
        # list so the served URLs can resolve to files.
        if len(path) > 0:
            _http_root_paths.append(path)

        self.log = logging.getLogger("com.fussyware.edproxy")
        self.log.setLevel(logging.DEBUG)

        self._name_replacement = None                    # optional basename override
        self._convert_format = IMAGE_CONVERT_FORMAT.BMP  # target image format
        self._delete_file = False                        # delete .bmp after convert?
        self._convert_space = None                       # char replacing spaces in names

        self._event_queue = EDEventQueue()
        self._observer = None

    def add_listener(self, callback, *args, **kwargs):
        """Register *callback* to be notified with each new image URL."""
        self._event_queue.add_listener(callback, *args, **kwargs)

    def is_started(self):
        """Return True when the directory observer is active."""
        return (self._observer != None)

    def set_convert_format(self, image_format):
        """Set the target IMAGE_CONVERT_FORMAT for new screenshots."""
        self._convert_format = image_format

    def set_delete_after_convert(self, delete_file):
        """When True, delete the original .bmp after a successful convert."""
        self._delete_file = delete_file

    def set_name_replacement(self, name):
        """Set (or clear) the basename used for converted screenshots."""
        if name:
            # Make sure we have a valid filename.
            # NOTE(review): str.translate(None, chars) is the Python 2
            # deletion form; this raises on Python 3 or unicode input.
            name = name.translate(None, '~#%&*{}\:<>?/+|"')

        if name:
            self._name_replacement = name
        else:
            self._name_replacement = None

    def set_convert_space(self, value):
        """Set the character used to replace spaces in output filenames."""
        if len(value) == 0:
            self._convert_space = None
        else:
            self._convert_space = value

    def set_image_path(self, path):
        """Change the watched screenshot directory (takes effect on start)."""
        self._path = path

        if len(path) > 0 and not path in _http_root_paths:
            _http_root_paths.append(path)

    def get_convert_format(self):
        """Return the target image format."""
        return self._convert_format

    def get_delete_after_convert(self):
        """Return whether originals are deleted after conversion."""
        return self._delete_file

    def get_convert_space(self):
        """Return the space-replacement character (or None)."""
        return self._convert_space

    def get_name_replacement(self):
        """Return the basename override (or None)."""
        return self._name_replacement

    def on_created(self, event):
        """Observer callback: process the new image on a worker thread."""
        _thread = threading.Thread(target=self.__run_imaged, args=(event, ))
        _thread.daemon = True
        _thread.start()

    def __log_parser(self, event):
        # Used while replaying past netlogs: remember the latest system
        # name so screenshots can be named after it.
        if event.get_line_type() == netlogline.NETLOG_LINE_TYPE.SYSTEM:
            self._name_replacement = event.get_name()

    def start(self):
        """Start watching the image directory and the HTTP-serving thread."""
        if not self._observer:
            self._observer = observers.Observer()
            self._observer.schedule(self, self._path, False)

            # Served-image base URL; port 8097 is hard-coded here.
            self._url = "http://%s:%d/" % (edutils.get_ipaddr(), 8097)

            self._thread = threading.Thread(target=self.__run_httpd)
            self._thread.daemon = True
            self._thread.start()

    def stop(self):
        """Stop the directory observer (idempotent)."""
        if self._observer:
            self._observer.stop()
            self._observer.join()
            self._observer = None

    def __wait_for_finish(self, path):
        # Poll the file size every 2s until it stops growing, i.e. the
        # game has finished writing the screenshot.
        copying = True
        prev_size = 0

        while copying:
            size = os.path.getsize(path)

            if (size != prev_size):
                prev_size = size
                time.sleep(2)
            else:
                copying = False

    def __run_imaged(self, events):
        """Worker: wait for, convert, and announce one new screenshot."""
        self.log.debug("New image created. Process...")
        pathname, filename = os.path.split(events.src_path)

        if self._convert_format != IMAGE_CONVERT_FORMAT.BMP:
            output_filename = ""

            if self._name_replacement:
                # NOTE(review): other code in this file uses
                # datetime.datetime.strptime, so with "import datetime" this
                # datetime.now() would raise AttributeError -- confirm the
                # module's import style.
                output_filename = "%s_%s%s" % (
                    self._name_replacement,
                    datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
                    self._convert_format)
            else:
                output_filename = os.path.splitext(
                    filename)[0] + self._convert_format

            if self._convert_space:
                output_filename = output_filename.replace(
                    " ", self._convert_space)

            filename = output_filename

        converted = False
        convert_attempts = 1

        # Up to 4 conversion attempts (counter values 1..4).
        while not converted and convert_attempts != 5:
            self.log.debug("Image Conversion attempt: [%d]", convert_attempts)
            convert_attempts = convert_attempts + 1

            self.__wait_for_finish(events.src_path)

            try:
                if self._convert_format == IMAGE_CONVERT_FORMAT.BMP:
                    # No conversion requested; serve the .bmp as-is.
                    converted = True
                else:
                    PIL.Image.open(events.src_path).save(
                        os.path.join(pathname, filename))
                    converted = True

                    if (self._delete_file):
                        os.remove(events.src_path)
            except Exception as e:
                self.log.error("Failed converting image! [%s]", e)

        self.log.debug("Check to see if we converted.")
        if converted:
            self.log.debug("Yep, we converted so send the event.")
            # Announce the URL of the (possibly converted) image.
            self._event_queue.post(
                _EDPictureEvent(self._url + urllib.quote_plus(filename)))

        self.log.debug("Finished handling new image.")

    def __run_httpd(self):
        # If no name replacement is configured yet, derive one by replaying
        # past netlogs (the last visited system name), then persist it.
        if not self._name_replacement:
            config_path, _ = os.path.split(
                os.path.normpath(edconfig.get_instance().get_netlog_path()))
            config_path = os.path.join(config_path, "AppConfig.xml")

            edparser.parse_past_logs(edconfig.get_instance().get_netlog_path(),
                                     edutils.get_logfile_prefix(config_path),
                                     self.__log_parser)
            edconfig.get_instance().set_image_name_replacement(
                self._name_replacement)

        # NOTE(review): despite the name, this only starts the directory
        # observer; the HTTP server itself is presumably started elsewhere.
        try:
            self._observer.start()
        except:
            pass