Example #1
File: edparser.py  Project: jimkeir/EDProxy
def parse_past_logs(netlog_path,
                    netlog_prefix,
                    callback,
                    args=None,
                    kwargs=None,
                    start_time=None):
    # Use None as the default (rather than a shared mutable dict) and
    # normalize to an empty tuple/dict here.
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    # log = logging.getLogger("com.fussyware.edproxy")

    # Parsed lines are posted to this queue, which dispatches them to the
    # registered callback (with any extra args/kwargs).
    eq = EDEventQueue()
    eq.add_listener(callback, *args, **kwargs)

    # Compiled patterns for the two netlog line formats: pre-2.1 and 2.1+.
    regex_pre21 = re.compile(REGEXP_PRE21)
    regex_post21 = re.compile(REGEXP_POST21)

    # (file_date, filename) tuples, ordered oldest to newest.
    loglist = _get_log_files(netlog_path, netlog_prefix)

    if loglist:
        if start_time is None:
            # Default to the timestamp of the oldest log file so everything is replayed.
            file_date, _ = loglist[0]
            start_time = file_date

        for file_date, filename in loglist:
            file_date = file_date.date()

            # Only replay files written on or after the requested start date.
            if file_date >= start_time.date():
                with open(filename, "r") as logfile:
                    prev_time = None
                    for line in logfile:
                        # Try the newer (2.1+) log format first, then fall back
                        # to the pre-2.1 format.
                        match = regex_post21.search(line)
                        if not match:
                            match = regex_pre21.search(line)

                        if match:
                            file_date, prev_time, parsed_line = _handle_netlog_regex_match(
                                match, file_date, prev_time, start_time)

                            if parsed_line:
                                # log.debug("We have parsed a new line into something: [%s]" % str(parsed_line))
                                eq.post(parsed_line)

    eq.flush()
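
For reference, a minimal usage sketch of parse_past_logs. The netlog directory path, the "netLog" filename prefix, and the print-only callback are illustrative assumptions, and the sketch assumes EDEventQueue invokes the callback with each parsed line:

from edparser import parse_past_logs

def print_netlog_line(parsed_line):
    # Hypothetical callback: simply print each parsed netlog line.
    print(parsed_line)

# The directory below is a placeholder for the Elite: Dangerous netlog folder.
parse_past_logs("C:/Path/To/Elite Dangerous/Logs",
                "netLog",
                print_netlog_line,
                start_time=None)  # None starts from the oldest log file
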
Example #2
File: edparser.py  Project: jimkeir/EDProxy
def parse_past_journals(journal_path,
                        journal_prefix,
                        callback,
                        args=None,
                        kwargs=None,
                        start_time=None):
    # Use None as the default (rather than a shared mutable dict) and
    # normalize to an empty tuple/dict here.
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    # log = logging.getLogger("com.fussyware.edproxy")

    # Parsed journal lines are posted to this queue, which dispatches them to
    # the registered callback (with any extra args/kwargs).
    eq = EDEventQueue()
    eq.add_listener(callback, *args, **kwargs)

    # (file_date, filename) tuples, ordered oldest to newest.
    loglist = _get_log_files(journal_path, journal_prefix)

    if loglist:
        if start_time is None:
            # No explicit start time: replay the most recent journal file from
            # its most recent game-start ("LoadGame") entry onwards.
            file_date, last_log_name = loglist[-1]
            start_time = file_date

            with open(last_log_name, "r") as logfile:
                for line in logfile:
                    line_json = json.loads(line)

                    if 'timestamp' in line_json and 'event' in line_json:
                        try:
                            # Remember the timestamp of the last 'LoadGame' seen.
                            if line_json['event'] == 'LoadGame':
                                start_time = datetime.datetime.strptime(
                                    line_json['timestamp'], "%Y-%m-%dT%H:%M:%SZ")
                                start_time = datetime_utc_to_local(start_time)
                        except ValueError:
                            pass

        if start_time is None:
            return

        # 'Cargo' and 'Loadout' can be logged a few seconds *before* 'LoadGame',
        # so keep a slightly earlier cut-off for those events (see below).
        offset_start_time = start_time - datetime.timedelta(seconds=3)

        for file_date, filename in loglist:
            file_date = file_date.date()

            # Only replay files written on or after the requested start date.
            if file_date >= start_time.date():
                with open(filename, "r") as logfile:
                    for line in logfile:
                        line_json = json.loads(line)

                        if 'timestamp' in line_json and 'event' in line_json:
                            try:
                                line_time = datetime.datetime.strptime(
                                    line_json['timestamp'], "%Y-%m-%dT%H:%M:%SZ")
                                line_time = datetime_utc_to_local(line_time)

                                parsed_line = JournalLineFactory.get_line(
                                    line_time, line_json)

                                # Rather annoyingly, "Cargo" and "Loadout" are sent
                                # *before* the "LoadGame" event. They're practically
                                # certain to arrive within a few seconds of it, though,
                                # so give those two events a little leeway. Also always
                                # send 'Fileheader' as a reset marker.
                                event = line_json['event']
                                if parsed_line and (
                                        event == 'Fileheader'
                                        or line_time >= start_time
                                        or (event in ('Cargo', 'Loadout')
                                            and line_time >= offset_start_time)):
                                    eq.post(parsed_line)
                            except ValueError:
                                pass

    eq.flush()
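
And a corresponding sketch for parse_past_journals. Again, the journal directory, the "Journal" filename prefix, and the callback are illustrative assumptions rather than values taken from the project:

from edparser import parse_past_journals

def print_journal_line(parsed_line):
    # Hypothetical callback: receives each parsed journal line in turn.
    print(parsed_line)

# Placeholder for the Elite: Dangerous saved-games journal folder.
parse_past_journals("C:/Path/To/Saved Games/Frontier Developments/Elite Dangerous",
                    "Journal",
                    print_journal_line,
                    start_time=None)  # None replays from the last 'LoadGame' in the newest file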