def log_parser():
    """Parse the last seven days of logs and render the job-entries page.

    Returns the rendered ``show_job_entries.html`` template with the parse
    result bound to ``job_entries``.
    """
    # The return value of get_db() was never used here; the call is kept for
    # its side effect (presumably initialising the request-scoped DB
    # connection — TODO confirm against get_db()'s implementation).
    get_db()
    end_time = datetime.now()
    start_time = end_time - timedelta(days=7)
    # assumes LogParser(start, end) limits parsing to that window — TODO confirm
    lp = LogParser(start_time, end_time)
    result = lp.parse()
    return render_template('show_job_entries.html', job_entries=result)
Exemple #2
0
    def sift_log(self):
        """Run the three parsing stages in order and emit the result.

        Raw log -> log events -> event groups, then output either as JSON
        or to the terminal depending on the configured flags.
        """
        # Stage 1: extract events from the raw log file.
        raw_parser = LogParser(self.log_file_path, self.event_rules)
        raw_parser.parse_log()
        found_events = raw_parser.log_events_found

        # Stage 2: group the events according to the grouping rules.
        event_parser = LogEventParser(found_events, self.group_rules)
        event_parser.parse_log_events()
        found_groups = event_parser.event_groups_found

        gather_stats = self.flags.collect_statistics

        # Stage 3: evaluate the groups against the criteria.
        group_parser = EventGroupParser(found_groups, self.criterias,
                                        gather_stats)
        group_parser.parse_event_groups()
        # Value is unused here, but the attribute read is preserved from the
        # original implementation.
        statistics = group_parser.statistics_summaries

        # Emit the results in the requested format.
        if self.flags.json_output:
            self.output_to_json(group_parser, gather_stats)
        else:
            self.output_to_terminal(group_parser)
Exemple #3
0
class TestLogParser(TestCase):
    """Tests for LogParser against the bundled 'access.log' fixture."""

    def setUp(self):
        self.log_parser = LogParser('access.log')

    def tearDown(self):
        pass

    def test_get_most_common(self):
        """Top-N IP counts match the known fixture contents."""
        self.assertEqual(self.log_parser.get_most_common(10),
                         [('198.50.156.189', 167812),
                          ('5.112.235.245', 166722), ('5.114.231.216', 158258),
                          ('5.113.18.208', 157674), ('91.218.225.68', 134376),
                          ('79.62.229.212', 114799), ('149.56.83.40', 97533),
                          ('5.114.64.184', 94043), ('5.113.216.211', 89125),
                          ('158.69.5.181', 88875)])
        self.assertEqual(self.log_parser.get_most_common(1),
                         [('198.50.156.189', 167812)])

    def test_log_by_http_code(self):
        """Every line written by log_by_http_code contains a status code."""
        file_name = "access_404"
        pattern = re.compile(r'\s[\d]{3}\s')
        try:
            self.log_parser.log_by_http_code(file_name, 404)
            self.assertTrue(os.path.exists(file_name))
            with open(file_name, "r") as r_log:
                for line in r_log:
                    match = pattern.search(line)
                    # Bug fix: re.search() returns None (never "") when there
                    # is no match, so the original `match == ""` check could
                    # never fail the test.
                    if match is None:
                        self.fail()
        finally:
            # Always clean up the generated file, even on assertion failure.
            os.remove(file_name)
Exemple #4
0
def main(argv=None):
    """Application entry point: parse the log file and print the result."""
    cli_args = parse_arguments(argv)
    log_parser = LogParser(cli_args.log_file)
    log_parser.parse_file()
    print(log_parser.format_result())
Exemple #5
0
 def GET(self, t_interval):
     """Return system-log entries from the camera for the given interval."""
     # Allow cross-origin requests from any host.
     web.header('Access-Control-Allow-Origin', '*')
     url = 'http://192.168.20.251/axis-cgi/systemlog.cgi'
     # Credentials were scrubbed from the published example.
     username = '******'
     password = '******'
     log_parser = LogParser(url, username, password)
     return log_parser.parse_log(t_interval)
Exemple #6
0
def parse_all_sessions(files):
    """Parse each log file in *files* and return one session per file.

    A single LogParser instance is reused for every file; the returned
    list preserves the input order.
    """
    log_parser = LogParser()
    # Idiom fix: dropped the stray C-style semicolons and replaced the
    # manual append loop with a comprehension.
    return [log_parser.parse(log_file) for log_file in files]
Exemple #7
0
def parse_all_sessions(files):
    """Run every file through one shared LogParser and collect the sessions."""
    parser = LogParser()
    parsed = []
    for log_file in files:
        parsed.append(parser.parse(log_file))
    return parsed
Exemple #8
0
def main():
    """Parse a system-call log and emit the selected feature vector.

    Chooses n-gram, co-occurrence matrix, or histogram extraction based on
    the parsed parameters, then writes the result as CSV or plain text to
    the configured output.
    """
    logging.info("Script starting...")
    params = Params()
    logging.info("Parsing parameters...")
    try:
        params.get_args()
    except (errors.InputError, errors.ParamError) as err:
        sys.stderr.write(str(err) + '\n')
        exit(1)

    logging.info("Parsing system calls log file...")

    # The column holding the system-call name shifts by one when the log
    # includes timestamps.
    column_pos = 1 if params.time_included else 0

    parser = LogParser(params.input_log, column_pos)

    if params.ngram is not None:
        logging.info("Getting Ngram...")
        feature_vector = parser.ngram(params.ngram, params.normalise,
                                      params.syscalls_list)
    elif params.co_occurrence_matrix is not None:
        logging.info("Getting Co-occurrence matrix...")
        feature_vector = parser.co_occurrence_matrix(
            params.co_occurrence_matrix, params.normalise,
            params.syscalls_list)
    else:
        # Histogram is the fallback when no explicit option was selected.
        if not params.histogram:
            sys.stderr.write("No option selected, using histogram.\n")
        logging.info("Getting Histogram...")
        feature_vector = parser.histogram(params.normalise,
                                          params.syscalls_list)

    if params.csv_values:
        writer = csv.writer(params.output, lineterminator="\n")
        writer.writerow(feature_vector.get_values())
    else:
        print(feature_vector, file=params.output, end='')

    params.cleanup()
def main():
    """Continuously parse Fall Guys episodes and upload them to the backend."""
    # NOTE: For now, we're not going to do this. Want to keep it here in case we go back to it.
    #  add_path_to_registry_startup_key(os.getenv('LOCALAPPDATA') + r'\Programs\FGStats_Client\fgstats_client.exe')

    # TODO: Add a try-catch and send to backend if we got an error here.
    STEAM_ID, STEAM_ACCOUNT_NAME = get_steam_user_info()

    while True:
        for episode in LogParser(follow_file(
                get_fall_guys_log_location())).parse():
            # Retry the upload for this episode until it goes through.
            while True:
                try:
                    post('https://api.fgstats.com/client',
                         data=dumps({
                             'steam_id': STEAM_ID,
                             'steam_account_name': STEAM_ACCOUNT_NAME,
                             'episode_info': episode,
                         }))
                except RequestException:
                    # Connection issue — back off and try again.
                    sleep(30)
                    continue
                break
def exec_operation():
    """Dispatch the lifecycle/monitor operation selected by the CLI flags.

    Builds a flag-combination -> handler table and invokes the handler that
    matches the analyzed argument type.  Lifecycle handlers receive a parsed
    log list and the user message; the monitor handler takes no arguments.
    """
    arg_type = ArgType()
    # presumably a property combining the parsed flags into a bitmask that
    # keys the `operations` table below — TODO confirm
    args_res = arg_type.analyze_type

    conf = Config()
    # `perform` is only bound in monitor mode; the MONITOR lambda below
    # relies on this and is only reachable when is_monitor is true.
    if arg_type.is_monitor:
        perform = Performance(arg_type.process_name, arg_type.message)

    # Fall back to the configured export path for both exporters.
    if LifecycleBase._export_to is None:
        LifecycleBase._export_to = conf.export_path
    if Performance._export_to is None:
        Performance._export_to = conf.export_path


    # Maps each supported flag combination to the lifecycle case to run.
    operations = {
        ArgType.SIGN_IN | ArgType.FIRST_SIGN_IN :\
            lambda var, msg: lifecycle_cases.NonSSOLifecycle().first_sign_in(var, msg),

        ArgType.SIGN_IN | ArgType.SECOND_SIGN_IN:\
            lambda var, msg: lifecycle_cases.NonSSOLifecycle().second_sign_in(var, msg),

        ArgType.SIGN_IN | ArgType.FIRST_SIGN_IN | ArgType.SECOND_SIGN_IN:\
            lambda var, msg: lifecycle_cases.NonSSOLifecycle().first_second_sign_in_out(var, msg),

        ArgType.SSO | ArgType.SIGN_IN | ArgType.FIRST_SIGN_IN:\
            lambda var, msg: lifecycle_cases.SSOLifecycle().first_sign_in(var, msg),

        ArgType.SSO | ArgType.SIGN_IN | ArgType.SECOND_SIGN_IN:\
            lambda var, msg: lifecycle_cases.SSOLifecycle().second_sign_in(var, msg),

        ArgType.SSO | ArgType.SIGN_IN | ArgType.FIRST_SIGN_IN | ArgType.SECOND_SIGN_IN:\
            lambda var, msg: lifecycle_cases.SSOLifecycle().first_second_sign_in_out(var, msg),

        ArgType.MONITOR:\
            lambda: perform.begin_monitor(time_span=arg_type.timespan),
    }

    method = operations.get(args_res)
    if method is not None:
        if arg_type.is_monitor:
            method()
        else:
            # Lifecycle cases consume the parsed log and the user's message.
            log_list = LogParser(conf.log_file_path, conf.log_file_tag)
            method(log_list, arg_type.message)
            if arg_type.clean:
                log_list.clean_log()
                logging.info("Log cleaned")
Exemple #11
0
def log_parser_plugin(cmd):
    """Run the log-parser plugin for *cmd*; return 0 on success, 1 on error."""
    print('Start plugin......')
    settings = get_settings(cmd)
    if not settings:
        print('Invalid command line flags')
        return 1

    print('Start log_parser...')
    results = LogParser(settings).log_process()

    if results['status'] == 'OK':
        print(results['message'])
        return 0
    print(results['error'])
    return 1
 def test_nginx_log_parsing(self):
     """A sample nginx record is flattened into the expected field map."""
     fmt = '$remote_addr - $remote_user [$time_local]  "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" "$request_body"'
     record = '127.0.0.1 - - [08/Jul/2016:08:45:58 -0400]  "POST /place HTTP/1.1" 404 233 "-" "curl/7.38.0" "way"'
     expected = {
         'request.first_line': 'POST /place HTTP/1.1',
         'request.headers.referer': '-',
         'request.headers.user-agent': 'curl/7.38.0',
         'request.method': 'POST',
         'request.protocol': 'HTTP/1.1',
         'request.src_ip': '127.0.0.1',
         'request.time': '08/Jul/2016:08:45:58 -0400',
         'request.uri': '/place',
         'request.user': '******',
         'request.body': 'way',
         'response.body_size': '233',
         'response.status': '404',
     }
     self.assertEqual(LogParser(fmt, 'nginx').parse(record), expected)
 def test_apache_log_parsing(self):
     """A sample apache CustomLog record is flattened into the expected map."""
     fmt = '%h %l %u %t "%r" %>s %O "%{Referer}i" "%{User-Agent}i"'
     record = '127.0.0.1 - - [12/Jul/2016:14:00:22 +0300] "GET /test HTTP/1.1" 200 10956 "-" "curl/7.47.0"'
     expected = {
         'request.client_id': '-',
         'request.first_line': 'GET /test HTTP/1.1',
         'request.headers.referer': '-',
         'request.headers.user-agent': 'curl/7.47.0',
         'request.method': 'GET',
         'request.protocol': 'HTTP/1.1',
         'request.src_ip': '127.0.0.1',
         'request.time': '[12/Jul/2016:14:00:22 +0300]',
         'request.uri': '/test',
         'request.user': '******',
         'response.size': '10956',
         'response.status': '200',
     }
     self.assertEqual(LogParser(fmt, 'apache').parse(record), expected)
Exemple #14
0
def main():
    """Create or update the log table according to the CLI arguments."""
    args = get_args()
    verbose = get_verbosity(args)
    date_parser_loaded = get_date_parser_loaded()
    start_day = get_start_day(args, date_parser_loaded)
    duration = args.duration
    filename = get_filename(args)
    create_string = get_create_string(args, filename)
    filetype = args.type

    if verbose:
        print_message(args, start_day, duration, filename, create_string,
                      filetype)

    if not args.update:
        # Fresh run: build the log table from scratch.
        write_log_table(filename, create_string, start_day, duration, filetype)
    else:
        # Update mode: refresh an existing log file in place.
        LogParser(filename).update_log()
def main():
    """Manual harness: run the SSO second-sign-in lifecycle case against the
    configured log file and print the resulting dict (Python 2 script).
    """
    from config import Config
    conf = Config()

    log_list = LogParser(conf.log_file_path, conf.log_file_tag)
    # non_sso_lifecycle = NonSSOLifecycle()
    # non_sso_lifecycle.first_sign_in(log_list)
    sso_lifecycle = SSOLifecycle()
    sso_lifecycle.second_sign_in(log_list)
    print sso_lifecycle.to_dict()
class WordsGenerator():
    """Builds a second-order word-level Markov chain from parsed log texts
    and generates random sentences from it.
    """

    def __init__(self):
        self.log_parser = LogParser()
        self.log_parser.loadParsedLog(FilePath.ROOTPATH + FilePath.PARSED_LOG)
        textlist = self.log_parser.outputAllTexts()
        self.wordlist = self.makeWordList(textlist)
        self.markov = self.makeMarkov()

    def makeWordList(self, textlist):
        """Tokenise every line with MeCab (wakati mode) into a flat word list."""
        tagger = MeCab.Tagger("-Owakati")
        wordlist = []
        for line in textlist:
            # Original author's note (translated): feeding too many lines at
            # once (around 100k?) makes parse() return None, so keep chunks
            # small.  Without split(" ") the text would be stored byte by
            # byte, whitespace included.
            words = tagger.parse(line).rstrip(" \n").split(" ")
            wordlist.extend(words)
        return wordlist

    def makeMarkov(self):
        """Build the chain: (word1, word2) -> list of words that followed."""
        markov = {}
        w1 = ""
        w2 = ""
        for word in self.wordlist:
            if w1 and w2:
                # setdefault replaces the original if-not-in/append dance.
                markov.setdefault((w1, w2), []).append(word)
            w1, w2 = w2, word
        return markov

    def makeSentence(self, l):
        """Generate a sentence of at most *l* words by walking the chain.

        Returns an empty string when the chain is empty (the original
        crashed with an IndexError in that case).
        """
        if not self.markov:
            return ""
        count = 0
        sentence = ""
        # Bug fix: random.choice needs a sequence; dict key views are not
        # indexable on Python 3, and dict.has_key() no longer exists.
        w1, w2 = random.choice(list(self.markov.keys()))
        while count < l:
            if (w1, w2) not in self.markov:
                break
            tmp = random.choice(self.markov[(w1, w2)])
            sentence += tmp
            w1, w2 = w2, tmp
            count += 1
        return sentence
Exemple #17
0
   def make_app(self):
      """Build the Tornado application and attach the tracker helpers."""
      routes = [
         # Additional endpoints to test the service is up and running.
         (r"/stats/total", TotalGameHandler),
         (r"/stats", SingleGameHandler),
         (r"/stats/([^/]+)", PastGameHandler),
         (r"/update", UpdateStatsHandler),
      ]
      app = tornado.web.Application(routes)
      app.game_tracker = GameTracker(GAME_ID)
      app.stats_parser = StatsParser(GAME_ID)
      app.log_parser = LogParser(GAME_ID)
      return app
def main():
    """CLI entry point: convert an input log file into json or yaml."""
    arg_parser = argparse.ArgumentParser(description='Convert logs to custom fileformat(now supports json and yaml fileformats)', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('input_file', help='file with input logs', type=argparse.FileType('r'))
    arg_parser.add_argument('output_file', help='file with parsed data', type=argparse.FileType('w'))
    # default nginx log_format: '$remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent"'
    # Note: spaces in log_format is important!
    arg_parser.add_argument('log_format', help='''logs format. Supports: nginx log_format and apache CustomLog, also you may use custom format''')
    arg_parser.add_argument('--log_type', help='type of log file.', choices=['nginx', 'apache', 'custom'], default='nginx')
    arg_parser.add_argument('--result_type', help='type of parsed file', choices=['yaml', 'json'], default='json')
    arg_parser.add_argument('-f', '--follow',  action='store_true', default=False,  help='process appended data as the file grows')
    args = arg_parser.parse_args()

    # Renamed to avoid shadowing the argparse parser above.
    log_parser = LogParser(args.log_format, args.log_type)
    parsed_logs = []
    try:
        for record in (tailer.follow(args.input_file) if args.follow else args.input_file):
            parsed_logs.append(log_parser.parse(record))
    except KeyboardInterrupt:
        print('interrupt received, stopping.')
    finally:
        # Whatever was collected gets written out, even on interrupt.
        if args.result_type == 'yaml':
            yaml.dump(parsed_logs, args.output_file, default_flow_style=False)
        elif args.result_type == 'json':
            json.dump(parsed_logs, args.output_file)
def get_parser(parser_type, path_to_file):
    parser = None
    if parser_type == "script":
        try:
            fd = open(path_to_file)
            parser = ScriptParser(fd.read())
        except IOError as e:
            print 'failed to open script file ' + path_to_file
    elif parser_type == "log":
        try:
            fd = open(path_to_file)
            parser = LogParser(fd.read())
        except IOError as e:
            print 'failed to open log file ' + path_to_file
    return parser
Exemple #20
0
def get_log_file_stats():
    """Return the configured log file's stats as a JSON response.

    On success responds 200 with the stats JSON; when the parser does not
    produce a Result (e.g. the log file is missing) responds 400 with an
    explanatory message.  Check the app logs in the failure case.
    """
    res = LogParser.get_stats(LOG_FILE)
    if isinstance(res, Result):
        return make_response(res.get_as_json(), 200)
    logging.warning(
        "File with path {} could not be parse!".format(LOG_FILE))
    return make_response(
        jsonify(message="Log file for path {} could not be parse!".format(
            LOG_FILE)), 400)
Exemple #21
0
class testParser(unittest.TestCase):
    """Tests for LogParser loading and importing games from 'games.log'."""

    def setUp(self):
        self.parser = LogParser('games.log')

    def test_load(self):
        # A fresh parser starts empty; load_to_memory() must populate content.
        self.assertEqual(self.parser.filename, 'games.log')
        self.assertEqual(self.parser.games, [])
        self.assertEqual(self.parser.content, '')
        self.parser.load_to_memory()
        self.assertNotEqual(self.parser.content, '')

    def test_import(self):
        # The games.log fixture is expected to contain exactly 21 games.
        self.assertEqual(len(self.parser.games), 0)
        self.parser.load_to_memory()
        self.parser.import_games()
        self.assertEqual(len(self.parser.games), 21)

    def test_import_without_load(self):
        # Importing before loading is a silent no-op, not an error.
        self.parser.import_games()
        self.assertEqual(len(self.parser.games), 0)
Exemple #22
0
def main():
    """Parse a system-call log and print the per-call summary."""
    logging.info("Script starting...")
    params = Params()
    logging.info("Parsing parameters...")
    try:
        params.get_args()
    except (errors.InputError, errors.ParamError) as err:
        sys.stderr.write(str(err) + '\n')
        exit(1)

    logging.info("Parsing system calls log file...")
    parser = LogParser(params.input_log, params.time_included)

    print("Calls analysed: {}".format(parser.system_calls_num))  # TODO

    print(parser, file=params.output)

    params.cleanup()
Exemple #23
0
def update_measures(m_id, is_new_measure=False):
    """Return the measures for *m_id* as JSON rows and re-parse the log.

    Raises AttributeError when no measure id is given.
    """
    if m_id is None:
        raise AttributeError("Measure id is None")

    measures = models.Measures.select().where(
        models.Measures.measure_id == int(m_id)).order_by(models.Measures.id)

    # First row is the header, followed by one row per measure.
    header = ['Id', 'Temperature Outside', 'Engine Temperature', 'Pressure',
              'Voltage', 'Fuel level']
    measures_lst = [header] + [m.to_list() for m in measures]

    lp = LogParser()
    if int(is_new_measure) == 1:
        lp.parse(truncate_log=True, new_measure=True)
    else:
        # Deliberately omit new_measure so the parser's own default applies.
        lp.parse(truncate_log=True)

    return jsonify(measures_lst)
Exemple #24
0
django.setup()

## continue normal imports
from log_parser import LogParser
from availability_app.models import Tracker

# Log to the file configured via the environment, at DEBUG level.
logging.basicConfig(
    filename=os.environ['AVL_API__LOG_PATH'],
    level=logging.DEBUG,
    format=
    '[%(asctime)s] %(levelname)s [%(module)s-%(funcName)s()::%(lineno)d] %(message)s',
    datefmt='%d/%b/%Y %H:%M:%S',
)
log = logging.getLogger(__name__)

# Module-level parser shared by the analyzer defined below.
parser = LogParser()

# Run at the lowest CPU priority so log analysis never starves the app.
os.nice(19)


class LogAnalyzer(object):
    """ Analyzes logs and populates db. """

    def __init__(self):
        """Load all configuration from AVL_API__* environment variables."""
        env = os.environ
        self.last_checked_json_path = env['AVL_API__LAST_CHECKED_JSON_PATH']
        # Directory that contains the configured log file.
        self.log_dir_path = os.path.dirname(env['AVL_API__LOG_PATH'])
        # Timestamp of the last log line processed; None until the first run.
        self.last_read_log_datetime = None
        self.legit_ips = json.loads(env['AVL_API__LEGIT_IPS_JSON'])
        self.legit_user_agents = json.loads(
            env['AVL_API__LEGIT_USER_AGENTS_JSON'])
Exemple #25
0
    def run(self):
        """The main routine which controls everything.

        Polls the game's log file (or the tracker server when watching
        another player), keeps the drawing tool's window in sync with the
        current state, and optionally uploads local state to the server.
        Loops until the drawing tool reports Event.DONE, then persists the
        window position and options.
        """
        # Frame counter; drives the "time to re-check the log/server" test.
        framecount = 0

        # Create drawing tool to use to draw everything - it'll create its own screen
        drawing_tool = DrawingTool(wdir_prefix)
        drawing_tool.set_window_title_info(
            update_notifier=(" v" + self.tracker_version))
        opt = Options()

        parser = LogParser(wdir_prefix, self.tracker_version, LogFinder())

        event_result = None
        state = None
        # Cached option values, compared each frame to detect changes made by
        # the user through the options menu while the loop is running.
        custom_title_enabled = opt.custom_title_enabled
        read_from_server = opt.read_from_server
        write_to_server = opt.write_to_server
        game_version = opt.game_version
        state_version = -1
        twitch_username = None
        new_states_queue = []
        screen_error_message = None
        retry_in = 0
        update_timer = opt.log_file_check_seconds
        last_game_version = None

        while event_result != Event.DONE:
            # Check for events and handle them
            event_result = drawing_tool.handle_events()

            # The user checked or unchecked the "Custom Title Enabled" checkbox
            if opt.custom_title_enabled != custom_title_enabled:
                custom_title_enabled = opt.custom_title_enabled
                drawing_tool.update_window_title()

            # The user started or stopped watching someone from the server (or they started watching a new person from the server)
            # NOTE(review): twitch_username starts as None, so this branch
            # appears to run on the first pass, which is what defines
            # update_timer_override before it is read below — confirm
            # opt.twitch_name can never be None here.
            if opt.read_from_server != read_from_server or opt.twitch_name != twitch_username:
                twitch_username = opt.twitch_name
                read_from_server = opt.read_from_server
                new_states_queue = []
                # Also restart version count if we go back and forth from log.txt to server
                if read_from_server:
                    state_version = -1
                    state = None
                    # Change the delay for polling, as we probably don't want to fetch it every second
                    update_timer_override = 2
                    # Show who we are watching in the title bar
                    drawing_tool.set_window_title_info(
                        watching=True,
                        watching_player=twitch_username,
                        updates_queued=len(new_states_queue))
                else:
                    drawing_tool.set_window_title_info(watching=False)
                    update_timer_override = 0

            # The user started or stopped broadcasting to the server
            if opt.write_to_server != write_to_server:
                write_to_server = opt.write_to_server
                drawing_tool.set_window_title_info(
                    uploading=opt.write_to_server)

            # Changing the game version invalidates everything parsed so far.
            if opt.game_version != game_version:
                parser.reset()
                game_version = opt.game_version

            # Force refresh state if we updated options or if we need to retry
            # to contact the server.
            if (event_result == Event.OPTIONS_UPDATE
                    or (screen_error_message is not None and retry_in == 0)):
                # By setting the framecount to 0 we ensure we'll refresh the state right away
                framecount = 0
                screen_error_message = None
                retry_in = 0
                # Force updates after changing options
                if state is not None:
                    state.modified = True

            # normally we check for updates based on how the option is set
            # when doing network stuff, this can be overridden
            update_delay = opt.log_file_check_seconds
            if update_timer_override != 0:
                update_delay = update_timer_override

            # Now we re-process the log file to get anything that might have loaded;
            # do it every update_timer seconds (making sure to truncate to an integer
            # or else it might never mod to 0)
            frames_between_checks = int(Options().framerate_limit *
                                        update_delay)
            if frames_between_checks <= 0:
                frames_between_checks = 1

            if framecount % frames_between_checks == 0:
                if retry_in != 0:
                    retry_in -= 1
                # Let the parser do his thing and give us a state
                if opt.read_from_server:
                    base_url = opt.trackerserver_url + "/tracker/api/user/" + opt.twitch_name
                    json_dict = None
                    try:
                        # Only fetch the full state when the server-side
                        # version is newer than what we already queued.
                        json_version = urllib.request.urlopen(
                            base_url + "/version").read()
                        if int(json_version) > state_version:
                            # FIXME better handling of 404 error ?
                            json_state = urllib.request.urlopen(
                                base_url).read()
                            json_dict = json.loads(json_state)
                            new_state = TrackerState.from_json(json_dict)
                            if new_state is None:
                                raise Exception("server gave us empty state")
                            state_version = int(json_version)
                            new_states_queue.append((state_version, new_state))
                            drawing_tool.set_window_title_info(
                                updates_queued=len(new_states_queue))
                    except Exception:
                        state = None
                        log_error("Couldn't load state from server\n" +
                                  traceback.format_exc())
                        if json_dict is not None:
                            if "tracker_version" in json_dict:
                                their_version = json_dict["tracker_version"]
                            else:
                                # This is the only version that can upload to the server but doesn't include a version string
                                their_version = "0.10-beta1"

                            if their_version != self.tracker_version:
                                screen_error_message = "They are using tracker version " + their_version + " but you have " + self.tracker_version
                else:
                    force_draw = state and state.modified
                    state = parser.parse()
                    if force_draw and state is not None:
                        state.modified = True
                    if write_to_server and not opt.trackerserver_authkey:
                        screen_error_message = "Your authkey is blank. Get a new authkey in the options menu and paste it into the authkey text field."
                    if state is not None and write_to_server and state.modified and screen_error_message is None:
                        opener = urllib.request.build_opener(
                            urllib.request.HTTPHandler)
                        put_url = opt.trackerserver_url + "/tracker/api/update/" + opt.trackerserver_authkey
                        json_string = json.dumps(
                            state, cls=TrackerStateEncoder,
                            sort_keys=True).encode("utf-8")
                        request = urllib.request.Request(put_url,
                                                         data=json_string)
                        request.add_header('Content-Type', 'application/json')
                        request.get_method = lambda: 'PUT'
                        try:
                            result = opener.open(request)
                            result_json = json.loads(result.read())
                            updated_user = result_json["updated_user"]
                            if updated_user is None:
                                screen_error_message = "The server didn't recognize you. Try getting a new authkey in the options menu."
                            else:
                                screen_error_message = None
                        except Exception as e:
                            log_error(
                                "ERROR: Couldn't send item info to server\n" +
                                traceback.format_exc())
                            screen_error_message = "ERROR: Couldn't send item info to server, check tracker_log.txt"
                            # Retry to write the state in 10*update_timer (aka 10 sec in write mode)
                            retry_in = 10

            # Check the new state at the front of the queue to see if it's time to use it
            # NOTE(review): the queue holds (state_version, state) pairs but
            # this compares state_version against wall-clock time; that only
            # makes sense if the server's version is a unix timestamp —
            # confirm.
            if len(new_states_queue) > 0:
                (state_timestamp, new_state) = new_states_queue[0]
                current_timestamp = int(time.time())
                if current_timestamp - state_timestamp >= opt.read_delay or opt.read_delay == 0 or state is None:
                    state = new_state
                    new_states_queue.pop(0)
                    drawing_tool.set_window_title_info(
                        updates_queued=len(new_states_queue))

            if state is None and screen_error_message is None:
                if read_from_server:
                    screen_error_message = "Unable to read state from server. Please verify your options setup and tracker_log.txt"
                    # Retry to read the state in 5*update_timer (aka 10 sec in read mode)
                    retry_in = 5
                else:
                    screen_error_message = "log.txt for " + opt.game_version + " not found. Make sure you have the right game selected in the options."

            if screen_error_message is not None:
                drawing_tool.write_error_message(screen_error_message)
            else:
                # We got a state, now we draw it
                drawing_tool.draw_state(state, framecount)

            # if we're watching someone and they change their game version, it can require us to reset
            if state and last_game_version != state.game_version:
                drawing_tool.reset_options()
                last_game_version = state.game_version

            drawing_tool.tick()
            framecount += 1

        # Main loop finished; program is exiting
        drawing_tool.save_window_position()
        Options().save_options(wdir_prefix + "options.json")
Exemple #26
0
            agent_labels.append(agent)
            plot, = plt.plot(ucs_levels, linewidth=2, color=color)
            total_plots.append(plot)

            counter += 1

        plt.legend(total_plots, agent_labels, loc=2)

        plt.xlabel('Games')
        plt.ylabel('Number of Iterations')
        plt.title('foobar')
        plt.show()


if __name__ == '__main__':
    # Parse every result log once, then render the full set of plots for
    # each game set found.
    parser = LogParser()
    results = parser.parse_logs()
    plotter = Plotter()
    # NOTE: results.iteritems() — this script targets Python 2.
    for game_set, games in results.iteritems():
        plotter.plot_quality_lines(games)
        plotter.plot_balance_lines(games)
        plotter.bar_plot_days_stayed(games)
        plotter.bar_plot_days_stayed_vs_total(games)
        plotter.bar_plot_ucs_levels(games)
        plotter.bar_plot_qualities(games)

        # if game_set == 3:
        #     game = games[1]
        #     title = "Quality Rating of Agents - Game 10"
        #     plotter.plot_quality_lines([game], title)
        #     title = "Bank Balance of Agents - Game 10"
Exemple #27
0
    params = sys.argv
    argc = len(params)
    
    if argc <= 1 : 
        print "********************************************"
        print "Invalid options"
        print "-game"
        print "-parse output input"
        print "-learn"
        print "-infer rolename"
        print "********************************************"
        exit()

    mode = params[1]
    if mode == "-parse" :
        log_parser = LogParser()
        if (argc <= 1):
            log_srcfilename = FilePath.ROOTPATH + FilePath.SOURCE_LOG
            log_parser.getLog(log_srcfilename)
            parsed_logfilename = FilePath.ROOTPATH + FilePath.PARSED_LOG
        elif (argc == 2):
            print "error : 出力ファイル名を指定してください\n"
            exit()
        elif (argc >= 3):
            srcfile = FilePath.ROOTPATH + FilePath.LOGFILES + params[2]
            log_parser.getLog(srcfile)
            parsed_logfilename = FilePath.ROOTPATH + FilePath.LOGFILES + params[3]

        log_parser.saveParsedLog(parsed_logfilename,"w+")
    elif mode == "-learn":
        
Exemple #28
0
class Statistics:
    """Scans stored tenhou.net game logs for notable games.

    Relies on a project-level ``LogParser`` to split raw log content into
    rounds and to read attributes out of individual log tags.
    """

    def __init__(self, db_path: str):
        # Path to the database that holds the raw log records.
        self.db_path = db_path
        self.parser = LogParser()

    def calculate_statistics(self):
        """Load a slice of logs from the db and scan each one for yakumans."""
        print("Loading logs from db...")
        # NOTE(review): offset/limit select a fixed 200k-log window starting
        # at 1M -- presumably hand-tuned for a particular dataset; confirm.
        logs = load_logs_from_db(self.db_path, offset=1000000, limit=200000)
        progress_bar = tqdm(logs, position=1)
        for log in progress_bar:
            parsed_rounds = self.parser.split_log_to_game_rounds(log["log_content"])

            self.find_yakumans(log["log_id"], parsed_rounds, progress_bar)

    def find_yakumans(self, log_id, parsed_rounds, progress_bar):
        """Write a replay link for each matching multi-yakuman hand.

        A hand matches when its 'yakuman' attribute lists id 37 or 38
        (presumably specific yakuman ids -- confirm against the tenhou tag
        format) together with at least one other yakuman.
        """
        for round_data in parsed_rounds:
            # BUG FIX: the original omitted this inner loop, so ``tag``
            # was an undefined name (NameError on the first check).
            for tag in round_data:
                if self.parser.is_agari_tag(tag):
                    if 'yakuman=' in tag:
                        yakuman = self.parser.get_attribute_content(tag, 'yakuman').split(',')
                        if ('37' in yakuman or '38' in yakuman) and len(yakuman) >= 2:
                            progress_bar.write(str(len(yakuman)) + f' https://tenhou.net/0/?log={log_id}')

    def find_high_level_games(self, log_id: str, parsed_rounds: List) -> Optional[str]:
        """Return a CSV line describing the game if all four players hold
        one of the top ranks, else None.

        The first tag of the first round carries the per-player 'dan'
        ranks and the quoted player names n0..n3.
        """
        start_game_tag = parsed_rounds[0][0]
        ranks = self.parser.comma_separated_string_to_ints(
            self.parser.get_attribute_content(start_game_tag, "dan")
        )

        # Rank ids accepted as "high level" (9-dan, 10-dan, Tenhou-i).
        allowed_ranks = {
            18: "九段",
            19: "十段",
            20: "天鳳位",
        }

        suitable_table = all([x in allowed_ranks for x in ranks])
        if not suitable_table:
            return None

        # Player names are URL-quoted in the log; decode for output.
        first_player = unquote(self.parser.get_attribute_content(start_game_tag, "n0"))
        second_player = unquote(self.parser.get_attribute_content(start_game_tag, "n1"))
        third_player = unquote(self.parser.get_attribute_content(start_game_tag, "n2"))
        fourth_player = unquote(self.parser.get_attribute_content(start_game_tag, "n3"))

        result = [
            f"http://tenhou.net/0/?log={log_id}",
            first_player,
            allowed_ranks[ranks[0]],
            second_player,
            allowed_ranks[ranks[1]],
            third_player,
            allowed_ranks[ranks[2]],
            fourth_player,
            allowed_ranks[ranks[3]],
        ]

        return ",".join(result)
Exemple #29
0
from log_parser import LogParser
from feature_extractor import FeatureExtractor
from clustering import Clustering
from idf import IDF

# Pipeline: parse raw HDFS log -> extract features -> cluster sequences.
log_parser = LogParser('../../data/HDFS_2K.log')
tagged_events, log_sequences = log_parser.parse()

feature_extractor = FeatureExtractor(log_sequences, list(tagged_events.keys()))
log_sequences = feature_extractor.extract()

sequence_clustering = Clustering(log_sequences, tagged_events)
cluster_ids, cluster_values, silhouette = sequence_clustering.cluster()

# Report only clusters that contain possibly-abnormal events.
for cluster_value in cluster_values:
    if cluster_value['num_possible_abnormal_events'] == 0:
        continue
    print(cluster_value)
    print()

# silhouette (the clustering quality coefficient) is computed above but
# not printed by default.

#/usr/bin/python3

#Python script to run cron task daily to create database log entries

from cron_monitor import app
from log_parser import LogParser
from datetime import datetime,timedelta
from flask_mysqldb import MySQL
from flask import Flask, g
import config
# Parse the last 24 hours of logs into database entries (daily cron task).
end_time = datetime.now()
start_time = end_time - timedelta(days=1)

# Push an application context so Flask extensions and `g` are usable
# outside a request.
ctx = app.app_context()
ctx.push()

# Wire up the MySQL connection from the local config module.
app.config.update(
    MYSQL_USER=config.MYSQL_DATABASE_USER,
    MYSQL_PASSWORD=config.MYSQL_DATABASE_PASSWORD,
    MYSQL_DB=config.MYSQL_DATABASE_DB,
    MYSQL_HOST=config.MYSQL_DATABASE_HOST,
    MYSQL_CURSORCLASS='DictCursor',
)
mysql = MySQL(app)
g.mysql_db = mysql.connection

result = LogParser(start_time, end_time).parse()
Exemple #31
0
 def __init__(self, db_path: str):
     # Remember where the database with raw logs lives and create the
     # shared log parser used by the scanning methods.
     self.db_path = db_path
     self.parser = LogParser()
Exemple #32
0
def main(arguments: Optional[List[str]] = None) -> None:
    """Parse command-line options and run the log processor.

    arguments -- argv-style list of strings; None lets argparse read
    sys.argv.  Dispatches to realtime, batch, summary, or fake-realtime
    processing depending on the selected run type.
    """
    def parse_ignored_ips(x: str) -> List[ipaddress.IPv4Network]:
        # '--ignore-ip' values are comma-separated CIDR blocks.
        return [
            ipaddress.ip_network(address, strict=False)
            for address in x.split(',')
        ]

    parser = argparse.ArgumentParser(description='Process log files.')

    # Run-type selection: exactly one of batch / summary / realtime /
    # fake-realtime; --cronjob is a deprecated alias for --batch.
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument(
        '--batch',
        '-b',
        action='store_const',
        dest="run_type",
        const=RunType.BATCH,
        help='Print a report on one or more completed log files.  The default.'
    )
    group.add_argument('--summary',
                       action='store_const',
                       dest="run_type",
                       const=RunType.SUMMARY,
                       help="Show the slugs that have been used in a log file")
    group.add_argument('--cronjob',
                       action='store_const',
                       dest="run_type",
                       const=RunType.BATCH,
                       help="Deprecated.  Use --batch instead")
    group.add_argument('--realtime',
                       '--interactive',
                       '-i',
                       '-r',
                       action='store_const',
                       dest="run_type",
                       const=RunType.REALTIME,
                       help='Watch a single log file in realtime')
    group.add_argument(
        '--xxfake-realtime',
        action='store_const',
        dest="run_type",
        const=RunType.FAKE_REALTIME,
        help=argparse.SUPPRESS,
    )
    parser.set_defaults(run_type=RunType.BATCH)

    # Batch-report sort order: by host ip or by session start time.
    group2 = parser.add_mutually_exclusive_group()
    group2.add_argument('--by-ip',
                        action='store_true',
                        dest='by_ip',
                        help='Sorts batched logs by host ip')
    group2.add_argument('--by-time',
                        action='store_false',
                        dest='by_ip',
                        help='Sorts batched logs by session start time')

    parser.add_argument('--html',
                        action='store_true',
                        dest='uses_html',
                        help='Generate html output rather than text output')
    parser.add_argument('--no-sessions',
                        action='store_true',
                        dest='no_sessions',
                        help="Don't generate detailed session information")

    parser.add_argument(
        '--date',
        '--cronjob-date',
        action='store',
        dest='date',
        help=
        'Date for --batch.  One of -<number>, yyyy-mm, or yyyy-mm-dd.  default is today.'
    )

    parser.add_argument('--api-host-url',
                        default=DEFAULT_FIELDS_PREFIX,
                        metavar='URL',
                        dest='api_host_url',
                        help='base url to access the information')
    parser.add_argument('--reverse-dns',
                        '--dns',
                        action='store_true',
                        dest='uses_reverse_dns',
                        help='Attempt to resolve the real host name')
    # May be given multiple times; each occurrence is itself a comma
    # separated list, so args.ignore_ip ends up as a list of lists.
    parser.add_argument(
        '--ignore-ip',
        '-x',
        default=[],
        action="append",
        metavar='cidrlist',
        dest='ignore_ip',
        type=parse_ignored_ips,
        help='list of ips to ignore.  May be specified multiple times')
    parser.add_argument(
        '--session-timeout',
        default=60,
        type=int,
        metavar="minutes",
        dest='session_timeout_minutes',
        help='a session ends after this period (minutes) of inactivity')
    parser.add_argument('--manifest',
                        default=[],
                        action='append',
                        dest='manifests')

    parser.add_argument(
        '--output',
        '-o',
        dest='output',
        help=
        "output file.  default is stdout.  For --batch, specifies the output pattern"
    )
    parser.add_argument(
        '--sessions-relative-directory',
        dest="sessions_relative_directory",
        help="relative directory into which to store the sessions information")
    parser.add_argument('--configuration',
                        dest='configuration_file',
                        default='opus.configuration',
                        help="location of python configuration file")

    # Stores DNS entries in a persistent database
    parser.add_argument('--xxdns-cache',
                        action="store_true",
                        dest="dns_cache",
                        help=argparse.SUPPRESS)

    # Debugging hack that shows all log entries
    parser.add_argument('--xxshowall',
                        action='store_true',
                        dest='debug_show_all',
                        help=argparse.SUPPRESS)

    # Caches the read entries into a database, rather than reading the log files anew each time.
    parser.add_argument('--xxcached_log_entry',
                        action='store_true',
                        dest='cached_log_entries',
                        help=argparse.SUPPRESS)

    parser.add_argument('log_files',
                        nargs=argparse.REMAINDER,
                        help='log files')
    args = parser.parse_args(arguments)

    run_type = cast(RunType, args.run_type)

    if run_type == RunType.BATCH:
        # Fix up the arguments to match what everyone else wants
        expand_globs_and_dates(args)
    # NOTE(review): no '--glob' option is defined by this parser, so
    # args.glob looks like it would raise AttributeError for non-batch
    # runs -- confirm whether a set_defaults elsewhere provides it.
    elif args.glob:
        args.log_files = [
            file for pattern in args.log_files for file in glob.glob(pattern)
        ]
        args.manifests = [
            file for pattern in args.manifests for file in glob.glob(pattern)
        ]

    # args.ignored_ip comes out as a list of lists, and it needs to be flattened.
    args.ignored_ips = [ip for arg_list in args.ignore_ip for ip in arg_list]
    args.ip_to_host_converter = \
        IpToHostConverter.get_ip_to_host_converter(**vars(args))

    # The configuration file is a python module; it supplies the
    # Configuration class that drives the parser.
    module = importlib.import_module(args.configuration_file)
    configuration = cast(AbstractConfiguration,
                         module.Configuration(**vars(args)))  # type: ignore
    log_parser = LogParser(configuration, **vars(args))

    if run_type == RunType.REALTIME:
        if len(args.log_files) != 1:
            raise Exception(
                "Must specify exactly one file for real-time mode.")
        log_entries_realtime = LogReader.read_logs_from_tailed_file(
            args.log_files[0])
        log_parser.run_realtime(log_entries_realtime)
    else:
        if len(args.log_files) < 1:
            raise Exception("Must specify at least one log file.")
        if args.cached_log_entries:
            log_entries_list = handle_cached_log_entries(args)
        else:
            log_entries_list = LogReader.read_logs(args.log_files)

        if run_type == RunType.BATCH:
            log_parser.run_batch(log_entries_list)
        elif run_type == RunType.SUMMARY:
            log_parser.run_summary(log_entries_list)
        elif run_type == RunType.FAKE_REALTIME:
            # Replay completed logs in timestamp order as if they were live.
            log_entries_list.sort(key=operator.attrgetter('time'))
            log_parser.run_realtime(iter(log_entries_list))
Exemple #33
0
            if temp is not None:
                players.append(temp)
            else:
                break
        return players


# Module-level singleton used by the rest of the application.
hideout = Hideout()

if __name__ == '__main__':

    logger.setLevel(level=logging.DEBUG)

    # Mirror log records to the console with a detailed format.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s %(lineno)d:%(filename)s(%(process)d) - %(message)s'
    ))
    logger.addHandler(stream_handler)

    from log_parser import LogParser
    import time

    # Start the background log parser and the hideout worker.
    log_parser = LogParser(config.LOG_PATH)
    log_parser.start()

    hideout.start()

    # Keep the main thread alive while the workers run.
    while True:
        time.sleep(1)
Exemple #34
0
from log_parser import LogParser
from game import GameReporter
import pprint, sys
# Load and parse the game log once at import time so the interactive
# menu below operates on already-imported data.
p = LogParser("games.log")
p.load_to_memory()
p.import_games()


def start():
    """Interactive report menu: keep prompting until the user exits."""
    menu = ("Please select one of the following options\n" +
            "   1 - Simple report\n" +
            "   2 - Simple Report Paginated\n" +
            "   3 - Detailed Report Paginated\n" +
            "   4 - Single Report By ID\n" +
            "   5 - Overall Ranking\n" + "   6 - Exit\n")
    while True:
        print('loading from games.log...')
        choice = input(menu)
        # Exactly one branch can match, so an elif chain is equivalent
        # to the original run of independent ifs.
        if choice == '1':
            simple_report()
        elif choice == '2':
            simple_report_paginated()
        elif choice == '3':
            detailed_report_paginated()
        elif choice == '4':
            single_report()
        elif choice == '5':
            overall_ranking()
        elif choice == '6':
            sys.exit(0)

 def __init__(self) :
     # Load the previously parsed log, derive a word list from all of
     # its texts, and build the markov model (see makeMarkov) used for
     # text generation.
     self.log_parser = LogParser()
     self.log_parser.loadParsedLog(FilePath.ROOTPATH + FilePath.PARSED_LOG)
     textlist = self.log_parser.outputAllTexts()
     self.wordlist = self.makeWordList(textlist)
     self.markov = self.makeMarkov()
Exemple #36
0
from log_parser import LogParser
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Plot speed-over-time curves, one figure per mode recorded in the log.
    logs = LogParser('text.log')
    # PERF: the original called logs.get_mods_values() (and
    # get_values_name()) on every loop iteration; hoist the invariant
    # lookups.  Assumes the getters are pure accessors -- TODO confirm.
    mods = logs.get_mods_values()
    value_names = logs.get_values_name()
    for mode in mods:
        plt.title(mode)
        plt.xlabel('time')
        plt.ylabel('speed')
        plt.grid()
        # Samples appear to be 200 time units apart; the x axis spans the
        # length of the last recorded value series.
        x = [i * 200 for i in range(len(mods[mode][value_names[-1]]))]
        for value in mods[mode]:
            plt.plot(x, mods[mode][value], label=value)
        plt.legend(bbox_to_anchor=(1.05, 1),
                   loc='upper left',
                   borderaxespad=0.)
        plt.show()
    def run(self):
        """Main event loop: draw the tracker state until the user quits.

        Either re-parses the local log file or polls the tracker server
        for another player's state, and optionally uploads the local
        state to the server.  (This variant uses urllib2 -- Python 2.)
        """

        update_notifier = self.check_for_update()
        framecount = 0

        # Create drawing tool to use to draw everything - it'll create its own screen
        drawing_tool = DrawingTool(self.file_prefix)
        drawing_tool.set_window_title(update_notifier)
        parser = LogParser(self.file_prefix, self.tracker_version)
        opt = Options()
        log = logging.getLogger("tracker")

        event_result = None
        state = None
        read_from_server = opt.read_from_server
        write_to_server = opt.write_to_server
        # Version of the last state fetched from the server; -1 forces a fetch.
        state_version = -1
        twitch_username = None
        # States fetched from the server, delayed by opt.read_delay seconds.
        new_states_queue = []
        screen_error_message = None

        while event_result != Event.DONE:

            # Check for events and handle them
            event_result = drawing_tool.handle_events()
            # A change means the user has (de)activated an option
            if opt.read_from_server != read_from_server\
            or opt.twitch_name != twitch_username:
                twitch_username = opt.twitch_name
                read_from_server = opt.read_from_server
                new_states_queue = []
                # Also restart version count if we go back and forth from log.txt to server
                if read_from_server:
                    state_version = -1
                    state = None
                    # show who we are watching in the title bar
                    drawing_tool.set_window_title(update_notifier, watching_player=twitch_username, updates_queued=len(new_states_queue))
                else:
                    drawing_tool.set_window_title(update_notifier)

            if opt.write_to_server and opt.write_to_server != write_to_server:
                write_to_server = True
                drawing_tool.set_window_title(update_notifier, uploading=True)

            if not opt.write_to_server:
                write_to_server = False

            if opt.read_from_server:
                # Change the delay for polling, as we probably don't want to fetch it every second
                update_timer = 2
            else:
                update_timer = self.read_timer

            if event_result == Event.OPTIONS_UPDATE:
                # By setting the framecount to 0 we ensure we'll refresh the state right away
                framecount = 0
                screen_error_message = None
                # force updates after changing options
                if state is not None:
                    state.modified = True


            # Now we re-process the log file to get anything that might have loaded;
            # do it every update_timer seconds (making sure to truncate to an integer
            # or else it might never mod to 0)
            if (framecount % int(Options().framerate_limit * update_timer) == 0):
                # Let the parser do his thing and give us a state
                if opt.read_from_server:
                    base_url = opt.trackerserver_url + "/tracker/api/user/" + opt.twitch_name
                    json_dict = None
                    try:
                        # Cheap version probe first; only fetch the full
                        # state when the server has something newer.
                        json_version = urllib2.urlopen(base_url + "/version").read()
                        if int(json_version) > state_version:
                            # FIXME better handling of 404 error ?
                            json_state = urllib2.urlopen(base_url).read()
                            json_dict = json.loads(json_state)
                            new_state = TrackerState.from_json(json_dict)
                            if new_state is None:
                                raise Exception
                            state_version = int(json_version)
                            new_states_queue.append((state_version, new_state))
                            drawing_tool.set_window_title(update_notifier, watching_player=twitch_username, updates_queued=len(new_states_queue), read_delay=opt.read_delay)
                    except Exception:
                        state = None
                        log.error("Couldn't load state from server")
                        import traceback
                        log.error(traceback.format_exc())
                        if json_dict is not None:
                            their_version = ""
                            if "tracker_version" in json_dict:
                                their_version = json_dict["tracker_version"]
                            else:
                                # this is the only version that can upload to the server but doesn't include a version string
                                their_version = "0.10-beta1"

                            if their_version != self.tracker_version:
                                screen_error_message = "They are using tracker version " + their_version + " but you have " + self.tracker_version
                else:
                    # Re-parse the local log; preserve the modified flag so
                    # a pending redraw isn't lost by the fresh parse.
                    force_draw = state and state.modified
                    state = parser.parse()
                    if force_draw:
                        state.modified = True
                    if write_to_server and not opt.trackerserver_authkey:
                        screen_error_message = "Your authkey is blank. Get a new authkey in the options menu and paste it into the authkey text field."
                    if state is not None and write_to_server and state.modified and screen_error_message is None:
                        # Upload our state to the server with an HTTP PUT.
                        opener = urllib2.build_opener(urllib2.HTTPHandler)
                        put_url = opt.trackerserver_url + "/tracker/api/update/" + opt.trackerserver_authkey
                        json_string = json.dumps(state, cls=TrackerStateEncoder, sort_keys=True)
                        request = urllib2.Request(put_url,
                                                  data=json_string)
                        request.add_header('Content-Type', 'application/json')
                        request.get_method = lambda: 'PUT'
                        try:
                            result = opener.open(request)
                            result_json = json.loads(result.read())
                            updated_user = result_json["updated_user"]
                            if updated_user is None:
                                screen_error_message = "The server didn't recognize you. Try getting a new authkey in the options menu."
                            else:
                                screen_error_message = None
                        except Exception as e:
                            import traceback
                            errmsg = traceback.format_exc()
                            log.error("ERROR: Couldn't send item info to server")
                            log.error(errmsg)
                            screen_error_message = "ERROR: Couldn't send item info to server, check tracker_log.txt"


            # check the new state at the front of the queue to see if it's time to use it
            if len(new_states_queue) > 0:
                (state_timestamp, new_state) = new_states_queue[0]
                current_timestamp = int(time.time())
                if current_timestamp - state_timestamp >= opt.read_delay or state is None:
                    state = new_state
                    new_states_queue.pop(0)
                    drawing_tool.set_window_title(update_notifier, watching_player=twitch_username, updates_queued=len(new_states_queue), read_delay=opt.read_delay)



            if state is None and screen_error_message is None:
                if read_from_server:
                    screen_error_message = "Unable to read state from server. Please verify your options setup and tracker_log.txt"
                else:
                    screen_error_message = "log.txt not found. Put the RebirthItemTracker folder inside the isaac folder, next to log.txt"

            if screen_error_message is not None:
                drawing_tool.write_error_message(screen_error_message)
            else:
                # We got a state, now we draw it
                drawing_tool.draw_state(state)

            drawing_tool.tick()
            framecount += 1

        # main loop finished. program is exiting
        drawing_tool.save_window_position()
    def run(self):
        """Main event loop: draw the tracker state until the user quits.

        Python 3 variant (urllib.request) of the tracker loop: re-parses
        the local log file or polls the tracker server for another
        player's state, optionally uploading the local state, with retry
        back-off on server errors.
        """
        framecount = 0

        # Create drawing tool to use to draw everything - it'll create its own screen
        drawing_tool = DrawingTool(wdir_prefix)
        drawing_tool.set_window_title_info(update_notifier=(" v" + self.tracker_version))
        opt = Options()

        parser = LogParser(wdir_prefix, self.tracker_version, LogFinder())

        event_result = None
        state = None
        custom_title_enabled = opt.custom_title_enabled
        read_from_server = opt.read_from_server
        write_to_server = opt.write_to_server
        game_version = opt.game_version
        # Version of the last state fetched from the server; -1 forces a fetch.
        state_version = -1
        twitch_username = None
        # States fetched from the server, delayed by opt.read_delay seconds.
        new_states_queue = []
        screen_error_message = None
        # Countdown (in update ticks) before retrying after a server error.
        retry_in = 0
        # NOTE(review): update_timer is never read below (update_delay is
        # used instead) -- looks like dead code; confirm.
        update_timer = opt.log_file_check_seconds
        last_game_version = None

        while event_result != Event.DONE:
            # Check for events and handle them
            event_result = drawing_tool.handle_events()

            # The user checked or unchecked the "Custom Title Enabled" checkbox
            if opt.custom_title_enabled != custom_title_enabled:
                custom_title_enabled = opt.custom_title_enabled
                drawing_tool.update_window_title()

            # The user started or stopped watching someone from the server (or they started watching a new person from the server)
            if opt.read_from_server != read_from_server or opt.twitch_name != twitch_username:
                twitch_username = opt.twitch_name
                read_from_server = opt.read_from_server
                new_states_queue = []
                # Also restart version count if we go back and forth from log.txt to server
                if read_from_server:
                    state_version = -1
                    state = None
                    # Change the delay for polling, as we probably don't want to fetch it every second
                    update_timer_override = 2
                    # Show who we are watching in the title bar
                    drawing_tool.set_window_title_info(watching=True, watching_player=twitch_username, updates_queued=len(new_states_queue))
                else:
                    drawing_tool.set_window_title_info(watching=False)
                    update_timer_override = 0

            # The user started or stopped broadcasting to the server
            if opt.write_to_server != write_to_server:
                write_to_server = opt.write_to_server
                drawing_tool.set_window_title_info(uploading=opt.write_to_server)

            if opt.game_version != game_version:
                parser.reset()
                game_version = opt.game_version

            # Force refresh state if we updated options or if we need to retry
            # to contact the server.
            if (event_result == Event.OPTIONS_UPDATE or
                (screen_error_message is not None and retry_in == 0)):
                # By setting the framecount to 0 we ensure we'll refresh the state right away
                framecount = 0
                screen_error_message = None
                retry_in = 0
                # Force updates after changing options
                if state is not None:
                    state.modified = True

            # normally we check for updates based on how the option is set
            # when doing network stuff, this can be overridden
            # NOTE(review): update_timer_override is only assigned inside the
            # watch-state-change branch above -- if that branch never runs on
            # the first iteration this raises NameError; confirm.
            update_delay = opt.log_file_check_seconds
            if update_timer_override != 0:
                update_delay = update_timer_override

            # Now we re-process the log file to get anything that might have loaded;
            # do it every update_timer seconds (making sure to truncate to an integer
            # or else it might never mod to 0)
            frames_between_checks = int(Options().framerate_limit * update_delay)
            if frames_between_checks <= 0:
                frames_between_checks = 1

            if framecount % frames_between_checks == 0:
                if retry_in != 0:
                    retry_in -= 1
                # Let the parser do his thing and give us a state
                if opt.read_from_server:
                    base_url = opt.trackerserver_url + "/tracker/api/user/" + opt.twitch_name
                    json_dict = None
                    try:
                        # Cheap version probe first; only fetch the full
                        # state when the server has something newer.
                        json_version = urllib.request.urlopen(base_url + "/version").read()
                        if int(json_version) > state_version:
                            # FIXME better handling of 404 error ?
                            json_state = urllib.request.urlopen(base_url).read()
                            # NOTE(review): json.loads takes no positional
                            # "utf-8" argument in Python 3 -- this looks like
                            # a leftover from a py2 conversion and would raise
                            # TypeError; confirm.
                            json_dict = json.loads(json_state, "utf-8")
                            new_state = TrackerState.from_json(json_dict)
                            if new_state is None:
                                raise Exception("server gave us empty state")
                            state_version = int(json_version)
                            new_states_queue.append((state_version, new_state))
                            drawing_tool.set_window_title_info(updates_queued=len(new_states_queue))
                    except Exception:
                        state = None
                        log_error("Couldn't load state from server\n" + traceback.format_exc())
                        if json_dict is not None:
                            if "tracker_version" in json_dict:
                                their_version = json_dict["tracker_version"]
                            else:
                                # This is the only version that can upload to the server but doesn't include a version string
                                their_version = "0.10-beta1"

                            if their_version != self.tracker_version:
                                screen_error_message = "They are using tracker version " + their_version + " but you have " + self.tracker_version
                else:
                    # Re-parse the local log; preserve the modified flag so
                    # a pending redraw isn't lost by the fresh parse.
                    force_draw = state and state.modified
                    state = parser.parse()
                    if force_draw:
                        state.modified = True
                    if write_to_server and not opt.trackerserver_authkey:
                        screen_error_message = "Your authkey is blank. Get a new authkey in the options menu and paste it into the authkey text field."
                    if state is not None and write_to_server and state.modified and screen_error_message is None:
                        # Upload our state to the server with an HTTP PUT.
                        opener = urllib.request.build_opener(urllib.request.HTTPHandler)
                        put_url = opt.trackerserver_url + "/tracker/api/update/" + opt.trackerserver_authkey
                        json_string = json.dumps(state, cls=TrackerStateEncoder, sort_keys=True)
                        request = urllib.request.Request(put_url,
                                                  data=json_string)
                        request.add_header('Content-Type', 'application/json')
                        request.get_method = lambda: 'PUT'
                        try:
                            result = opener.open(request)
                            result_json = json.loads(result.read())
                            updated_user = result_json["updated_user"]
                            if updated_user is None:
                                screen_error_message = "The server didn't recognize you. Try getting a new authkey in the options menu."
                            else:
                                screen_error_message = None
                        except Exception as e:
                            log_error("ERROR: Couldn't send item info to server\n" + traceback.format_exc())
                            screen_error_message = "ERROR: Couldn't send item info to server, check tracker_log.txt"
                            # Retry to write the state in 10*update_timer (aka 10 sec in write mode)
                            retry_in = 10

            # Check the new state at the front of the queue to see if it's time to use it
            if len(new_states_queue) > 0:
                (state_timestamp, new_state) = new_states_queue[0]
                current_timestamp = int(time.time())
                if current_timestamp - state_timestamp >= opt.read_delay or opt.read_delay == 0 or state is None:
                    state = new_state
                    new_states_queue.pop(0)
                    drawing_tool.set_window_title_info(updates_queued=len(new_states_queue))

            if state is None and screen_error_message is None:
                if read_from_server:
                    screen_error_message = "Unable to read state from server. Please verify your options setup and tracker_log.txt"
                    # Retry to read the state in 5*update_timer (aka 10 sec in read mode)
                    retry_in = 5
                else:
                    screen_error_message = "log.txt for " + opt.game_version + " not found. Make sure you have the right game selected in the options."

            if screen_error_message is not None:
                drawing_tool.write_error_message(screen_error_message)
            else:
                # We got a state, now we draw it
                drawing_tool.draw_state(state)

            # if we're watching someone and they change their game version, it can require us to reset
            if state and last_game_version != state.game_version:
                drawing_tool.reset_options()
                last_game_version = state.game_version

            drawing_tool.tick()
            framecount += 1

        # Main loop finished; program is exiting
        drawing_tool.save_window_position()
        Options().save_options(wdir_prefix + "options.json")
Exemple #39
0
"""Replay a Fast Paxos log as an animated event sequence on a cluster view."""

from log_parser import LogParser
from cluster import Cluster, NodeFactory
from converter import LogsToEventsConverter
from events import EventLoop

N = 4
WIDTH = HEIGHT = 600
TITLE = "Fast Paxos"
LOGFILE = "../../../logs/test.log"

# Build the N-node cluster window; nodes are sized/placed by the factory.
node_factory = NodeFactory((20, 20), 8)
paxos_cluster = Cluster(TITLE, N, (WIDTH, HEIGHT), node_factory)

event_loop = EventLoop()

# Parse the raw log file, then translate each record into loop events.
parsed_logs = LogParser.parse(LOGFILE)
LogsToEventsConverter(paxos_cluster, event_loop).convert(parsed_logs)

event_loop.run()

paxos_cluster.close()
Exemple #40
0
def main(arguments: Optional[List[str]] = None) -> None:
    def parse_ignored_ips(x: str) -> List[ipaddress.IPv4Network]:
        return [ipaddress.ip_network(address, strict=False) for address in x.split(',')]

    parser = argparse.ArgumentParser(description='Process log files.')

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--realtime', '--interactive', '-i', '-r', action='store_true',
                       help='Watch a single log file in realtime')
    group.add_argument('--batch', '-b', action='store_true',
                       help='Print a report on one or more completed log files')
    group.add_argument('--summary', action='store_true', dest='summary',
                       help="Show the slugs that have been used in a log file")
    group.add_argument('--cronjob', action='store_true', dest='cronjob',
                       help="Used by the chron job to generate a daily summary")
    group.add_argument('--xxfake-realtime', action='store_true', help=argparse.SUPPRESS, dest='fake_realtime')

    group2 = parser.add_mutually_exclusive_group()
    group2.add_argument('--by-ip', action='store_true', dest='by_ip',
                        help='Sorts batched logs by host ip')
    group2.add_argument('--by-time', action='store_false', dest='by_ip',
                        help='Sorts batched logs by session start time')

    parser.add_argument('--html', action='store_true', dest='uses_html',
                        help='Generate html output rather than text output')

    parser.add_argument('--cronjob-date', action='store', dest='cronjob_date',
                        help='Date for --cronjob.  One of -<number>, yyyy-mm, or yyyy-mm-dd.  default is today.')

    parser.add_argument('--api-host-url', default=DEFAULT_FIELDS_PREFIX, metavar='URL', dest='api_host_url',
                        help='base url to access the information')
    parser.add_argument('--reverse-dns', '--dns', action='store_true', dest='uses_reverse_dns',
                        help='Attempt to resolve the real host name')
    parser.add_argument('--ignore-ip', '-x', default=[], action="append", metavar='cidrlist', dest='ignore_ip',
                        type=parse_ignored_ips,
                        help='list of ips to ignore.  May be specified multiple times')
    parser.add_argument('--session-timeout', default=60, type=int, metavar="minutes", dest='session_timeout_minutes',
                        help='a session ends after this period (minutes) of inactivity')

    parser.add_argument('--output', '-o', dest='output',
                        help="output file.  default is stdout.  For --cronjob, specifies the output pattern")

    # TODO(fy): Temporary hack for when I don't have internet access
    parser.add_argument('--xxlocal', action="store_true", dest="uses_local", help=argparse.SUPPRESS)
    # TODO(fy): Debugging hack that shows all URLs.
    parser.add_argument('--xxshowall', action='store_true', dest='debug_show_all', help=argparse.SUPPRESS)

    parser.add_argument('log_files', nargs=argparse.REMAINDER, help='log files')
    args = parser.parse_args(arguments)

    if args.cronjob:
        # Fix up the arguments to match what everyone else wants
        convert_cronjob_to_batchjob(args)
        if not args.log_files:
            print("No log files found.")
            return

    # args.ignored_ip comes out as a list of lists, and it needs to be flattened.
    args.ignored_ips = [ip for arg_list in args.ignore_ip for ip in arg_list]
    # Another fake argument we need
    args.ip_to_host_converter = IpToHostConverter.get_ip_to_host_converter(args.uses_reverse_dns, args.uses_local)

    module = importlib.import_module("opus.session_info")
    configuration = cast(AbstractConfiguration, module.Configuration(**vars(args)))  # type: ignore
    log_parser = LogParser(configuration, **vars(args))

    if args.realtime:
        if len(args.log_files) != 1:
            raise Exception("Must specify exactly one file for real-time mode.")
        log_entries_realtime = LogReader.read_logs_from_tailed_file(args.log_files[0])
        log_parser.run_realtime(log_entries_realtime)
    else:
        if len(args.log_files) < 1:
            raise Exception("Must specify at least one log file.")
        log_entries_list = LogReader.read_logs(args.log_files)
        if args.batch:
            log_parser.run_batch(log_entries_list)
        elif args.summary:
            log_parser.run_summary(log_entries_list)
        elif args.fake_realtime:
            log_entries_list.sort(key=operator.attrgetter('time'))
            log_parser.run_realtime(iter(log_entries_list))