Example #1
 def _import_logs(self, filename):
     progress = self.component_manager.get_current("progress_dialog")\
                (self.component_manager)
     progress.set_text(_("Importing history..."))
     parser = TxtLogParser(self.database(), ids_to_parse=self.items_by_id)
     log_dir = os.path.join(os.path.dirname(filename), "history")
     if not os.path.exists(log_dir):
         self.main_widget().information_box(\
             _("No history found to import."))
         return
     filenames = [os.path.join(log_dir, logname) for logname in \
         sorted(os.listdir(log_dir)) if logname.endswith(".bz2")]       
     # log.txt can also contain data we need to import, especially on the
     # initial upgrade from 1.x. 'ids_to_parse' will make sure we only pick
     # up the relevant events. (If we do the importing after having used
     # 2.x for a while, there could be duplicate load events, etc, but these
     # don't matter.)
     filenames.append(os.path.join(os.path.dirname(filename), "log.txt"))
     progress.set_range(0, len(filenames))
     for count, filename in enumerate(filenames):
         progress.set_value(count)
         try:
             parser.parse(filename)
         except Exception:
             self.main_widget().information_box(\
                 _("Ignoring unparsable file:") + " " + filename)
     progress.set_value(len(filenames))
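
The progress widget above is obtained through Mnemosyne's component manager and only has to support set_text, set_range and set_value. A minimal console stand-in for exercising this import path outside the GUI could look like the sketch below; the class name and its output format are illustrative and not part of libmnemosyne.

class ConsoleProgressDialog(object):

    # Stand-in for the "progress_dialog" component: it only needs to honour
    # the set_text / set_range / set_value calls that _import_logs makes.

    def __init__(self, component_manager=None):
        # The real dialog is instantiated with the component manager; a
        # console version can simply ignore it.
        self.upper = 0

    def set_text(self, text):
        print(text)

    def set_range(self, minimum, maximum):
        self.upper = maximum

    def set_value(self, value):
        print("%d/%d files processed" % (value, self.upper))
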
Example #2
 def parse_directory(self):       
     self.parser = TxtLogParser(database=self)
     self._delete_indices()  # Takes too long while parsing.
     filenames = [os.path.join(self.log_dir, filename) for filename in \
         sorted(os.listdir(self.log_dir)) if filename.endswith(".bz2")]
     filenames_count = len(filenames)
     for counter, filename in enumerate(filenames):
         sys.stdout.flush()
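         # Skip log files that have already been parsed in a previous run.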
         if self.con.execute(\
             "select log_name from parsed_logs where parsed_logs.log_name=?",
             (os.path.basename(filename), )).fetchone() is not None:
             print "(%d/%d) %1.1f%% %s already parsed" % \
                   (counter + 1, filenames_count,
                   (counter + 1.) / filenames_count * 100, \
                   os.path.basename(filename))
             continue
         print "(%d/%d) %1.1f%% %s" % (counter + 1, filenames_count,
             (counter + 1.) / filenames_count * 100, \
             os.path.basename(filename))
         try:
             self.parser.parse(filename)
         except KeyboardInterrupt:
             print "Interrupted!"
             sys.exit()
         except Exception:
             print "Can't parse file, ignoring."
         # Record the log as handled even when parsing failed, so that it is
         # not retried on the next run.
         self.con.execute("insert into parsed_logs(log_name) values(?)",
             (os.path.basename(filename), ))
         self.con.commit()
     self._create_indices()
Example #3
import os
import sys
import sqlite3

# TxtLogParser and the SCHEMA string used below are provided elsewhere in the
# original code base.

class LogDatabase(object):

    # A subset of the codes from
    # mnemosyne.libmnemosyne.databases.SQLite_logging.py

    STARTED_PROGRAM = 1
    STOPPED_PROGRAM = 2
    STARTED_SCHEDULER = 3
    LOADED_DATABASE = 4
    SAVED_DATABASE = 5
    ADDED_CARD = 12
    UPDATED_CARD = 13
    DELETED_CARD = 14
    REPETITION = 18
   
    def __init__(self, log_dir):
        self.log_dir = log_dir
        self._connection = None
        db_name = os.path.join(self.log_dir, "logs.db")
        initialisation_needed = not os.path.exists(db_name)
        self.con = sqlite3.connect(db_name, timeout=0.1,
                                   isolation_level="EXCLUSIVE")
        self.con.row_factory = sqlite3.Row
        if initialisation_needed:
            self.con.executescript(SCHEMA)

    def parse_directory(self):       
        self.parser = TxtLogParser(database=self)
        self._delete_indices()  # Takes too long while parsing.
        filenames = [os.path.join(self.log_dir, filename) for filename in \
            sorted(os.listdir(self.log_dir)) if filename.endswith(".bz2")]
        filenames_count = len(filenames)
        for counter, filename in enumerate(filenames):
            sys.stdout.flush()
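            # Skip log files that have already been parsed in a previous run.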
            if self.con.execute(\
                "select log_name from parsed_logs where parsed_logs.log_name=?",
                (os.path.basename(filename), )).fetchone() is not None:
                print "(%d/%d) %1.1f%% %s already parsed" % \
                      (counter + 1, filenames_count,
                      (counter + 1.) / filenames_count * 100, \
                      os.path.basename(filename))
                continue
            print "(%d/%d) %1.1f%% %s" % (counter + 1, filenames_count,
                (counter + 1.) / filenames_count * 100, \
                os.path.basename(filename))
            try:
                self.parser.parse(filename)
            except KeyboardInterrupt:
                print "Interrupted!"
                sys.exit()
            except Exception:
                print "Can't parse file, ignoring."
            # Record the log as handled even when parsing failed, so that it is
            # not retried on the next run.
            self.con.execute("insert into parsed_logs(log_name) values(?)",
                (os.path.basename(filename), ))
            self.con.commit()
        self._create_indices()

    def _delete_indices(self):
        self.con.execute("drop index if exists i_log_timestamp;")
        self.con.execute("drop index if exists i_log_user_id;")
        self.con.execute("drop index if exists i_log_object_id;")
        
    def _create_indices(self):
        self.con.execute("create index i_log_timestamp on log (timestamp);")
        self.con.execute("create index i_log_user_id on log (user_id);")
        self.con.execute("create index i_log_object_id on log (object_id);")
                                         
    def log_started_program(self, timestamp, program_name_version):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, object_id)
            values(?,?,?,?)""",
            (self.parser.user_id, self.STARTED_PROGRAM, int(timestamp),
             program_name_version)) 

    def log_stopped_program(self, timestamp):
        self.con.execute(\
            "insert into log(user_id, event, timestamp) values(?,?,?)",
            (self.parser.user_id, self.STOPPED_PROGRAM, int(timestamp)))

    def log_started_scheduler(self, timestamp, scheduler_name):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, object_id)
            values(?,?,?,?)""",
            (self.parser.user_id, self.STARTED_SCHEDULER, int(timestamp),
            scheduler_name))
    
    def log_loaded_database(self, timestamp, scheduled_count,
        non_memorised_count, active_count):
        # The scheduled, non-memorised and active counts are stored in the
        # acq_reps, ret_reps and lapses columns, reusing the log table layout.
        self.con.execute(\
            """insert into log(user_id, event, timestamp, acq_reps, ret_reps,
            lapses) values(?,?,?,?,?,?)""",
            (self.parser.user_id, self.LOADED_DATABASE, int(timestamp),
            scheduled_count, non_memorised_count, active_count))
        
    def log_saved_database(self, timestamp, scheduled_count,
        non_memorised_count, active_count):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, acq_reps, ret_reps,
            lapses) values(?,?,?,?,?,?)""",
            (self.parser.user_id, self.SAVED_DATABASE, int(timestamp),
            scheduled_count, non_memorised_count, active_count))
        
    def log_added_card(self, timestamp, card_id):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, object_id)
            values(?,?,?,?)""",
            (self.parser.user_id, self.ADDED_CARD, int(timestamp), card_id))
        
    def log_deleted_card(self, timestamp, card_id):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, object_id)
            values(?,?,?,?)""",
            (self.parser.user_id, self.DELETED_CARD, int(timestamp), card_id))
     
    def log_repetition(self, timestamp, card_id, grade, easiness, acq_reps,
        ret_reps, lapses, acq_reps_since_lapse, ret_reps_since_lapse,
        scheduled_interval, actual_interval, new_interval, thinking_time):
        self.con.execute(\
            """insert into log(user_id, event, timestamp, object_id, grade,
            easiness, acq_reps, ret_reps, lapses, acq_reps_since_lapse,
            ret_reps_since_lapse, scheduled_interval, actual_interval,
            new_interval, thinking_time)
            values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)""",
            (self.parser.user_id, self.REPETITION, int(timestamp), card_id,
            grade, easiness, acq_reps, ret_reps, lapses, acq_reps_since_lapse,
            ret_reps_since_lapse, scheduled_interval, actual_interval,
            new_interval, int(thinking_time)))

    def set_offset_last_rep(self, card_id, offset, last_rep):
        self.con.execute(\
            """insert or replace into _cards(id, offset, last_rep)
            values(?,?,?)""",
            (card_id + self.parser.user_id, offset, int(last_rep)))

    def get_offset_last_rep(self, card_id):
        sql_result = self.con.execute("""select offset, last_rep
           from _cards where _cards.id=?""",
           (card_id + self.parser.user_id, )).fetchone()
        return sql_result["offset"], sql_result["last_rep"]
    
    def update_card_after_log_import(self, id, creation_time, offset):
        # Intentionally a no-op: this log-only database has nothing to update
        # after a log import.
        pass
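
Example #3 refers to a SCHEMA constant that is not shown. The sketch below is inferred purely from the columns the insert statements above reference; the column types, and the schema actually shipped with Mnemosyne, are assumptions. The small driver at the end shows one possible way to run the parser over a directory of logs.

# Hypothetical schema, reconstructed from the columns used above; the real
# Mnemosyne schema may differ.
SCHEMA = """
    create table log(
        user_id text,
        event integer,
        timestamp integer,
        object_id text,
        grade integer,
        easiness real,
        acq_reps integer,
        ret_reps integer,
        lapses integer,
        acq_reps_since_lapse integer,
        ret_reps_since_lapse integer,
        scheduled_interval integer,
        actual_interval integer,
        new_interval integer,
        thinking_time integer
    );
    create table parsed_logs(
        log_name text primary key
    );
    create table _cards(
        id text primary key,
        offset integer,
        last_rep integer
    );
"""

if __name__ == "__main__":
    # Hypothetical driver: point the database at a directory containing the
    # .bz2 history logs and parse everything that has not been parsed before.
    db = LogDatabase(log_dir=sys.argv[1])
    db.parse_directory()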