Ejemplo n.º 1
0
    def find_meta_data(self, path, *args):
        """Collect metadata values for the given keys from both ends of a file.

        Scans the leading ";"-prefixed lines (key/value split on ",") and the
        trailing ";"-prefixed lines containing ":" (split on ":"), delegating
        extraction to self.examine_line.

        :param path: file path relative to self.get_file_path()
        :param args: metadata keys to look up
        :return: dict mapping each key in args to its extracted value ("" if absent)
        """
        path = self.get_file_path() + path
        dictionary = {arg: "" for arg in args}

        # Forward pass: stop at the first line that is not a ";" comment.
        with open(path) as file:
            for line in file:
                if not line.startswith(";"):
                    break
                self.examine_line(dictionary, line, ",")

        # Backward pass. The context manager guarantees the handle is closed
        # even when the loop runs to completion (the original only closed it
        # on the break path, leaking it otherwise).
        with FileReadBackwards(path) as backwards:
            for line in backwards:
                if not (line.startswith(";") and ":" in line):
                    break
                self.examine_line(dictionary, line, ":")
        return dictionary
Ejemplo n.º 2
0
def continue_index(filename):
    """Return the integer key at the start of the last line of *filename*.

    Reads the file backwards, takes the last line, strips quotes and braces,
    splits on the first ":" and parses the left part as an int. Exits the
    process with status -1 if the line cannot be parsed.

    :param filename: path of the file to inspect
    :return: int parsed from the start of the last line
    """
    # Context manager ensures the backwards reader is closed (the original
    # never closed it).
    with FileReadBackwards(filename) as f:
        last_line = next(iter(f))
    try:
        print(last_line)
        # Strip surrounding quotes and braces, then split "key: value".
        last_line = last_line.strip().replace('"',
                                              '').strip("}").strip("{").split(
                                                  ":", 1)  # because f**k.
        return int(last_line[0])
    except ValueError:
        # Narrowed from a bare except: only int() can realistically fail here.
        print(
            "Ups. There has been a problem with continue file. Please fix this shit."
        )
        sys.exit(-1)
Ejemplo n.º 3
0
def read_log(entryType, nEntries):
    """Return up to nEntries log entries of the given type, newest first.

    Walks LOG_FILE_PATH backwards, decodes each line with log_entry_decode
    and keeps entries whose 'EntryID' equals entryType.value.

    :param entryType: enum-like object; its .value is compared to 'EntryID'
    :param nEntries: maximum number of entries to collect
    :return: list of decoded entry dicts
    """
    entries = []
    # "with" closes the reader even if log_entry_decode raises; the original
    # manual close() was skipped on any exception.
    with FileReadBackwards(LOG_FILE_PATH, encoding="utf-8") as fo:
        for line in fo:
            entry_dict = log_entry_decode(line)
            if int(entry_dict['EntryID']) == entryType.value:
                entries.append(entry_dict)
                if len(entries) == nEntries:
                    break
    return entries
Ejemplo n.º 4
0
    def __init__(self, path='/var/log/pwnagotchi.log'):
        """Parse the most recent session from the pwnagotchi log.

        Reads *path* backwards until SessionParser.START_TOKEN is found,
        keeps that session's lines and derives the session id and stats.

        :param path: log file to parse
        """
        self.path = path
        self.last_session = None
        self.last_session_id = ''
        self.last_saved_session_id = ''
        self.duration = ''
        self.duration_human = ''
        self.deauthed = 0
        self.associated = 0
        self.handshakes = 0
        self.peers = 0
        self.last_peer = None
        # Raw string: the pattern relies on regex escapes such as \( and \d,
        # which are invalid escapes in a plain string literal (the value is
        # unchanged, but non-raw form is deprecated).
        self._peer_parser = re.compile(
            r'detected unit (.+)@(.+) \(v.+\) on channel \d+ \(([\d\-]+) dBm\) \[sid:(.+) pwnd_tot:(\d+) uptime:(\d+)\]'
        )

        lines = []
        with FileReadBackwards(self.path, encoding="utf-8") as fp:
            for line in fp:
                line = line.strip()
                # skip lines that are non-empty but don't start with '['
                if line != "" and line[0] != '[':
                    continue
                lines.append(line)
                if SessionParser.START_TOKEN in line:
                    break
        lines.reverse()

        # NOTE(review): lines[0] raises IndexError when nothing was collected
        # (empty log) — confirm the log always contains at least one session.
        self.last_session = lines
        self.last_session_id = hashlib.md5(lines[0].encode()).hexdigest()
        self.last_saved_session_id = self._get_last_saved_session_id()

        self._parse_stats()
Ejemplo n.º 5
0
def readPlayList():
    """Return the next track from the current playlist file.

    Uses module globals: currTrack (current track), homedir, playList
    (playlist name suffix) and direction ('resume', 'previous', or anything
    else for forward). For 'previous' the playlist is scanned backwards so
    the line after currTrack is the preceding track.

    :return: next track name, or '' if none found
    """
    global currTrack
    global homedir
    global playList
    global direction
    if direction == 'resume' and currTrack != '':
        return currTrack

    playlist_path = homedir + 'playList' + playList + '.txt'
    if direction == 'previous':
        hplayList = FileReadBackwards(playlist_path, encoding="utf-8")
    else:
        hplayList = open(playlist_path, 'r')

    nextTrack = ''
    retnext = 0
    # Both reader types support the context-manager protocol; the original
    # never closed the playlist file at all.
    with hplayList:
        for f in hplayList:
            f = f.rstrip('\n')
            if retnext == 1:
                nextTrack = f
                break
            if currTrack == "":
                nextTrack = f
                break
            if currTrack == f:
                retnext = 1
    return nextTrack
def log_get_most_recent_lines(limit):
    """Get lines from the back of the log, oldest of the tail first."""
    with FileReadBackwards(LOGFILE_PATH, encoding="utf-8") as frb:
        newest_first = list(itertools.islice(frb, limit))
    newest_first.reverse()
    return newest_first
Ejemplo n.º 7
0
def gather_plot_stats(TEST,DURATION,CYCLE_PERIOD,DEVICE_TYPE,PREDICTOR,OUTPUT_DIR,DATE):
    """Extract predictor hit/false-positive rates from a gem5 stats.txt.

    Scans the stats file backwards and records the last occurrence of each
    tracked statistic; 'nan' values are stored as 0.

    :return: dict mapping stat name -> value (0 if missing or nan)
    """
    # Tracked stats; each flag flips to True once the stat has been seen.
    checker = {"powerPred.overall_fp_rate": False,
               "powerPred.overall_hit_rate": False}

    HOME = os.environ['HOME']

    test_name = TEST + '_' + DURATION + '_' + CYCLE_PERIOD + '_' + DEVICE_TYPE + '_' + PREDICTOR
    print(test_name)
    path = HOME + '/' + OUTPUT_DIR + '/gem5_out/' + test_name + '/stats.txt'

    storage = {k: 0 for k in checker}

    with FileReadBackwards(path, encoding="utf-8") as frb:
        while True:
            line = frb.readline()
            if not line:
                break
            for k in checker:
                if k in line:
                    checker[k] = True
                    value = line.split()[1]
                    storage[k] = 0 if value == 'nan' else float(value)
            # Stop as soon as every tracked stat has been found; the builtin
            # all() replaces the needless functools.reduce over `and`.
            if all(checker.values()):
                break
    return storage
def match_error_string(file_path, time_a, time_b, match_string):
    """Searches a string in a log file for all the lines where date is between time_a and time_b

    :param file_path: path of the log file to search
    :param time_a: time which defines the starting point of the time frame
    :param time_b: time which defines the ending point of the time frame
    :param match_string: string to be search in any log lines
    :returns: If the string is found it returns True. False is returned otherwise
    """
    with FileReadBackwards(file_path, encoding="utf-8") as frb:
        for line in frb:
            match = re.match(HEALTHCHECK_LOGREGEXP, line)
            if not match:
                continue

            # First two space-separated columns form the timestamp.
            columns = line.split(' ')
            str_date = columns[0] + ' ' + columns[1]
            dt_line = datetime.strptime(str_date, HEALTHCHECK_DATEFORMAT)
            # Reading backwards: once a line falls outside the window, stop.
            if dt_line < time_a or dt_line > time_b:
                break

            # Bug fix: `line.find(match_string) > 0` missed a match at
            # column 0; membership test covers every position.
            if match_string in line:
                return True

    return False
Ejemplo n.º 9
0
async def last_upgrade(request):
    """aiohttp handler: report the timestamp of the last apt upgrade.

    Scans /var/log/apt/history.log backwards; remembers the most recent
    'End-Date' and reports it once a matching 'apt ... upgrade' command
    line is found.

    :param request: aiohttp request (unused)
    :return: JSON response with the timestamp, or an error message
    """
    # Bug fix: the original initialised a misspelled dead variable
    # ("timpestamp"), so `timestamp` could be unbound (NameError) when the
    # upgrade command was found before any End-Date line.
    timestamp = 0
    founded = False

    with FileReadBackwards("/var/log/apt/history.log", encoding="utf-8") as f:

        while True:
            line = f.readline()
            if not line:
                break

            current_config = line.split(': ')

            if current_config[0] == 'End-Date':
                timestamp = time.mktime(datetime.datetime.strptime(current_config[1].rstrip(), "%Y-%m-%d %H:%M:%S")
                                        .timetuple())
            elif current_config[0] == 'Commandline':
                cmd = current_config[1].rstrip()
                # matches "apt upgrade", "apt-get dist-upgrade", optional -y
                match = re.match(r'^apt(-get)? (.* )?(dist-)?upgrade( -y)?$', cmd)
                if match is not None:
                    founded = True
                    break

        if founded:
            res = {"data": {"utc-timestamp": int(timestamp)}}
        else:
            res = {"error": True, "msg": "No info about upgrade"}

    return web.json_response(res)
Ejemplo n.º 10
0
    def parse(self, skip=False):
        """Parse the logs of the last session, unless told to skip.

        Reads self.path backwards up to LastSession.START_TOKEN, stores the
        session lines and derives the session id and stats from them.
        Always marks the parser done via self.parsed.
        """
        if skip:
            logging.debug("skipping parsing of the last session logs ...")
            self.parsed = True
            return

        logging.debug("parsing last session logs ...")

        collected = []
        if os.path.exists(self.path):
            with FileReadBackwards(self.path, encoding="utf-8") as fp:
                for raw in fp:
                    stripped = raw.strip()
                    # keep empty lines and "["-prefixed lines, drop the rest
                    if stripped and stripped[0] != '[':
                        continue
                    collected.append(stripped)
                    if LastSession.START_TOKEN in stripped:
                        break
            collected.reverse()

        if not collected:
            collected.append("Initial Session")

        self.last_session = collected
        self.last_session_id = hashlib.md5(collected[0].encode()).hexdigest()
        self.last_saved_session_id = self._get_last_saved_session_id()

        self._parse_stats()
        self.parsed = True
Ejemplo n.º 11
0
 def get_last_update_date(self):
     """Return the unix timestamp of the last apt upgrade, or 0 if unknown."""
     with FileReadBackwards('/var/log/apt/history.log') as history:
         for entry in self._read_apt_blocks(history):
             upgrade_date = self._try_extract_upgrade_date(entry)
             if upgrade_date:
                 return upgrade_date.timestamp()
     return 0
Ejemplo n.º 12
0
def get_days():
    """Return the third field of the line that follows (reading backwards)
    the last full-width '=' separator in the data file."""
    separator = "=" * 79
    with FileReadBackwards("res/WUTBEAVRS-1.DEP", encoding="utf-8") as data:
        for row in data:
            if row.strip() == separator:
                break
        # the next backwards line is the one just above the separator
        return data.readline().split()[2]
Ejemplo n.º 13
0
def parse_stats(stat_file):
  """Parse a gem5 stats.txt (read backwards) into per-epoch stat dicts.

  Each epoch dict maps "stats.<name>" to either ["single", value] or, for
  '|'-separated ruby stats, ["ruby_multi", [[...], ...]].

  :param stat_file: path to the stats file
  :return: list of epoch dicts (newest epoch first, as read backwards)
  """
  epoch = []
  stats = {}
  with FileReadBackwards(stat_file, encoding="utf-8") as sf:
    for line in sf:
      if line.strip() == "":
        continue
      elif "End Simulation Statistics" in line:
        # reading backwards, an "End" marker opens a new epoch
        stats = {}
      elif "Begin Simulation Statistics" in line:
        # reading backwards, a "Begin" marker closes the epoch
        epoch.append(stats)
      else:
        stat = []
        # Raw string: '\s' is an invalid escape in a plain literal.
        sstr = re.sub(r'\s+', ' ', line).strip()
        if('-----' in sstr):
          continue
        elif(sstr == ''):
          continue
        elif(sstr.split(' ')[1] == '|'):
          # Ruby Stats: one value group per '|' column
          l = []
          for i in sstr.split('|')[1:]:
            l.append(i.strip().split(' '))
          stat.append("ruby_multi")
          stat.append(l)
        else:
          stat.append("single")
          stat.append(sstr.split(' ')[1])
        stats["stats."+sstr.split(' ')[0]] = stat
  print("Read "+str(len(epoch))+" Epochs")
  return epoch
Ejemplo n.º 14
0
def getPeriodLog(path, lastNHour=24):
    """Return the log lines from *path* newer than *lastNHour* hours,
    oldest first.

    The file is read backwards. Lines that do not pass isValidLogLine
    (continuation lines of multi-line entries) are glued onto the following
    valid line before its date is checked.

    :param path: log file path
    :param lastNHour: size of the time window in hours
    :return: list of lines (chronological order), or None if the file is missing
    """
    now = datetime.datetime.now()
    last = now - datetime.timedelta(hours=lastNHour)
    try:
        with FileReadBackwards(path, encoding="utf-8") as frb:
            arr = []
            lastLine = ""      # line (or glued multi-line block) pending classification
            lastValid = False  # whether lastLine began as a valid log line
            while True:
                l = frb.readline()
                if lastLine:
                    if lastValid:
                        date_parsed = getDateFromLine(lastLine)
                        # keep only entries inside the time window
                        if date_parsed and date_parsed > last:
                            arr.append(lastLine)
                            lastLine = l
                        # NOTE(review): when the date is missing or too old,
                        # lastLine is left unchanged and the current line l
                        # is dropped — confirm this is the intended behavior.
                    else:
                        # continuation line: prepend to the pending block
                        lastLine = l + "\n" + lastLine
                else:
                    lastLine = l
                lastValid = isValidLogLine(l)
                if not l:
                    break
    except FileNotFoundError:
        print(f"Log file not found: {path}")
    else:
        # reading was backwards, so reverse into chronological order
        return list(reversed(arr))
Ejemplo n.º 15
0
 def showLog(self):
     """HTTP handler: send lines [start, end) of a log file, read backwards.

     start/end come from the query string (defaults 0/100); the log filename
     is the request path relative to this script's directory. On failure a
     short error page is sent instead.
     """
     parsed_path = urlparse(self.path)
     filename = os.path.dirname(os.path.abspath(__file__)) + parsed_path.path
     query = parse_qs(parsed_path.query)
     data = ""
     start = 0
     end = 100
     cnt = 0
     try:
         if(query['start'][0]):
             start = int(query['start'][0])
         if(query['end'][0]):
             end = int(query['end'][0])
     except (KeyError, IndexError, ValueError):
         # missing or non-numeric query parameters: keep the defaults
         # (narrowed from a bare except)
         print("can not get query parameters " + str(self.path))
     print("query=", start, end)
     try:
         with FileReadBackwards(filename, encoding="utf-8") as fh:
             for line in fh:
                 if(cnt >= start and cnt < end):
                     data += line + "\n"
                 cnt += 1
         data += "Total lines===" + str(cnt) + "\n"
         self.send_header('Content-type', 'text/html')
         self.end_headers()
         self.wfile.write(str.encode(data))
     except Exception:
         # narrowed from a bare except, which would also swallow
         # SystemExit/KeyboardInterrupt
         print("".join(traceback.format_stack()))
         self.send_header('Content-type', 'text/html')
         self.end_headers()
         self.wfile.write(bytes("Error open log", "utf8"))
     return
Ejemplo n.º 16
0
    def watch(self):
        """Process recent DHCPACK entries, then tail the log forever.

        First pass reads the file backwards, saving each DHCPACK client
        until self.client() fails to produce one. Then the file is followed
        from its end (tail -f style), saving every new DHCPACK client.

        NOTE(review): this method never returns; the forward handle `fp` is
        never closed, and the bare except can busy-loop on read errors.
        """
        with FileReadBackwards(self.filename, encoding="utf-8") as frb:
            for line in frb:
                if not "DHCPACK" in line:
                    continue

                client = self.client(line)
                if not client:
                    break

                self.save(client)

        fp = open(self.filename, 'r')
        fp.seek(0, 2)  # jump to end of file: only tail newly appended lines

        while True:
            try:
                line = fp.readline()

            except:
                continue

            if not line:
                time.sleep(1)  # nothing new yet; poll once per second
                continue

            if "DHCPACK" in line:
                client = self.client(line)

                if not client:
                    continue

                self.save(client)
Ejemplo n.º 17
0
def already_scraped(post_id, created):
    """Check whether a post id was already scraped recently.

    Scans the scraped-posts file backwards (newest entries first) and gives
    up once entries older than SEARCH_BACK_DAYS days before *created* are
    reached.

    Returns 1 if the post id is found, 0 otherwise.
    """
    # Only consider entries newer than this cutoff.
    oldest_relevant = created - (DAY_IN_SECS * SEARCH_BACK_DAYS)
    with FileReadBackwards(SCRAPED_POSTS_FILE, encoding="utf-8") as spf:
        for raw in spf:
            # Lines have the form "creation,id".
            entry_created, entry_id = raw.replace('\n', '').split(",")
            if entry_id == post_id:
                return 1  # match: already scraped
            if float(entry_created) < oldest_relevant:
                return 0  # searched far enough back without a match
    return 0  # no match anywhere in the file
Ejemplo n.º 18
0
def _parse_gem5_stat_line(line):
    """Parse one stats.txt line into (name, stat) or None for separators.

    stat is ["ruby_multi", [[...], ...]] for '|'-separated ruby stats,
    otherwise ["single", value].
    """
    # Collapse whitespace and drop the trailing "# comment" part.
    # Raw string: '\s' is an invalid escape in a plain literal.
    sstr = re.sub(r'\s+', ' ', line).strip().split("#")[0].strip()
    if '-----' in sstr or sstr == '':
        return None
    name = sstr.split(' ')[0]
    if '|' in sstr:
        # Ruby stats: one value group per '|' column
        groups = [part.strip().split(' ') for part in sstr.split('|')[1:]]
        return name, ["ruby_multi", groups]
    return name, ["single", sstr.split(' ')[1]]


def _gem5_stats_from_lines(lines):
    """Accumulate stats from an iterable of lines.

    Stops at the first "Begin Simulation Statistics" marker; an "End" marker
    resets the accumulator (matches the original backwards-reading logic).
    """
    stats = defaultdict(lambda: [0] * 64)
    for line in lines:
        if line.strip() == "":
            continue
        if "End Simulation Statistics" in line:
            stats = defaultdict(lambda: [0] * 64)
        elif "Begin Simulation Statistics" in line:
            break
        else:
            parsed = _parse_gem5_stat_line(line)
            if parsed is not None:
                stats[parsed[0]] = parsed[1]
    return stats


def build_gem5_stat_dict(file, stat_list=None, read_from_file=True):
    """ Build a dict of stats from the Gem5 stats.txt

    :param file: stats.txt path (used when read_from_file is True)
    :param stat_list: list of newline-joined stat blocks (used otherwise);
                      defaults to None instead of a shared mutable []
    :param read_from_file: choose the file or the in-memory source
    :return: defaultdict mapping stat name -> ["single", v] / ["ruby_multi", ...]
    """
    if read_from_file:
        with FileReadBackwards(file, encoding="utf-8") as sf:
            return _gem5_stats_from_lines(sf)
    blocks = stat_list if stat_list is not None else []
    # Flatten the blocks newest-first to mirror the backwards file read;
    # both duplicated branches of the original are now one helper call.
    lines = (line for sl in blocks[::-1] for line in sl.split("\n"))
    return _gem5_stats_from_lines(lines)
Ejemplo n.º 19
0
    def _send_email(self, subject):
        """Send a backup report email.

        Collects the log lines of the current backup (everything back to the
        last "Starting backup" marker) and mails them via SMTP over SSL.
        Errors are logged rather than raised.
        """
        try:
            # Gather the backup log lines; the file is read backwards, so
            # reverse afterwards to restore chronological order.
            log_lines = []
            with FileReadBackwards(self.loggerfile, encoding='utf-8') as log_file:
                for line in log_file:
                    log_lines.append(line)
                    if "Starting backup" in line:
                        break
            log_lines.reverse()

            # Build the message body
            body = "Backup Report : \n" + '\n'.join(log_lines)
            message = 'Subject: {}\n\n{}'.format(subject, body)

            # Send the mail
            with smtplib.SMTP_SSL(self.smtphost, self.smtpport) as smtp:
                smtp.login(self.emailsender, self.passsender)
                smtp.sendmail(self.emailsender, [self.emaildest], message)

        except Exception as e:
            self.logger.error(str(e))
Ejemplo n.º 20
0
def checkOutput(path, config='single'):
    '''
    Check the tail of a model's condor.out for a termination message.

    The config variable accepts either the value "single" or
    the value "binary" and controls how the model name is derived from path.

    :param path: directory containing condor.out
    :param config: 'single' (name = last 20 chars of path) or 'binary'
                   (name = path from its 'bin' component onwards)
    :return: (hasFinished, current_status) tuple
    :raises ValueError: for any other config value
    '''
    hasFinished = False
    keyword1 = 'stop because'
    keyword2 = 'termination'
    line_counter = 1

    if config == 'single':
        name = path[-20:]
    elif config == 'binary':
        idx = path.index('bin')
        name = path[idx:]
    else:
        raise ValueError('config must be set either to single or binary!')

    # Default status: an empty condor.out no longer leaves current_status
    # unbound (the original raised UnboundLocalError at the return).
    current_status = f'Model {name} still running!'

    with FileReadBackwards(f'{path}/condor.out') as file:
        for line in file:
            # check only the last 20 lines
            if line_counter > 20:
                break
            if line.startswith((keyword1, keyword2)):
                current_status = f'Model {name} has been terminated!'
                hasFinished = True
                break
            current_status = f'Model {name} still running!'
            line_counter += 1
    return hasFinished, current_status
Ejemplo n.º 21
0
def findXInLog(
        m="/Users/Benjamin/Documents/crpsp/software_source/test00/log.txt"):
    """Return the [i, j, k] ids of the last created file recorded in the log.

    Reads the log backwards and, on the first line starting with "X",
    decodes three two-digit ids from fixed columns (e.g. "99-48-74.xml").
    Returns None if no such line exists.
    """
    ids = []
    with FileReadBackwards(m) as log_file:
        for entry in log_file:
            if entry[0] != "X":
                continue
            # Each id is two digits at fixed offsets: tens * 10 + units.
            i = int(entry[4]) * 10 + int(entry[5])
            print("The i value is: ", i)
            ids.append(i)
            j = int(entry[7]) * 10 + int(entry[8])
            ids.append(j)
            print("The j value is: ", j)
            k = int(entry[10]) * 10 + int(entry[11])
            ids.append(k)
            print("The k value is: ", k)
            return ids  # ex: "99-48-74.xml"
Ejemplo n.º 22
0
    def getAccessLogs(self):
        """Collect nginx access-log entries newer than the persisted
        watermark and feed them to self.extractMetrics.

        Entries are '?'-separated key*value pairs; the backwards read stops
        at the first entry at or before cumMetrics['timeStamp'].
        """
        accessLogsList = []
        store = open(self.meta['storePath'],
                     'rb')  #reading the previous persistently stored variable.
        cumMetrics = pickle.load(store)
        store.close()

        isset = False
        try:
            with FileReadBackwards(
                    self.meta['accessLogPath'], encoding='utf-8'
            ) as f:  # reading the file backwards till the lines which are generated in the last one minute.
                for line in f:
                    accessLogData = {}
                    # each line is "key*value?key*value?..."
                    for value in line.split('?'):
                        pair = value.split('*')
                        accessLogData[pair[0].strip()] = pair[1]
                    # we parse the access,
                    #Converting time to unix timestamp format
                    timestamp = datetime.strptime(
                        accessLogData['time_local'],
                        '%d/%b/%Y:%H:%M:%S %z').timestamp()
                    if not isset:
                        # newest entry's timestamp becomes the new watermark
                        new_time_stamp = timestamp
                        isset = True
                    if timestamp <= cumMetrics[
                            'timeStamp']:  ## Reading till last read log
                        break
                    accessLogsList.append(accessLogData)
            # NOTE(review): new_time_stamp is unbound when the log is empty;
            # the bare except below silently hides that (and any parse error).
            self.cum['timeStamp'] = new_time_stamp
            self.extractMetrics(accessLogsList, new_time_stamp)
        except:
            logger.log("Error while parsing the access logs of nginx")
Ejemplo n.º 23
0
    def read_sensor_file(
            sensor_name: str,
            minutes_to_read: int = 0) -> List[Tuple[datetime, float]]:
        """Read (timestamp, value) pairs for a sensor from its log file.

        The log is read backwards (newest first); each line has the format
        "2021-03-12 18:51:16=55". With minutes_to_read == 0 only the newest
        pair is returned; otherwise every pair within the last
        minutes_to_read minutes. A corrupted log file is deleted and []
        returned.

        :param sensor_name: sensor whose log file is read
        :param minutes_to_read: time window in minutes (0 = newest entry only)
        :return: list of (timestamp, value) tuples, newest first
        """
        log_file_path = SensorLogger.get_sensor_logfile_path(sensor_name)
        if not log_file_path.exists():
            return []
        current_time = datetime.now()
        time_value_pairs: List[Tuple[datetime, float]] = []
        try:
            with FileReadBackwards(str(log_file_path), encoding="utf-8") as fp:
                for line in fp:
                    parts = line.split("=")
                    timestamp = datetime.fromisoformat(parts[0])
                    if not minutes_to_read:
                        # Bug fix: return a (datetime, float) tuple as the
                        # annotation promises; the original returned a
                        # mutated [str, float] list here.
                        return [(timestamp, float(parts[1]))]
                    if (current_time -
                            timestamp) > timedelta(minutes=minutes_to_read):
                        break
                    time_value_pairs.append(
                        (timestamp, float(parts[1].strip())))
        except Exception:
            # try to delete when file is corrupted (narrowed from bare except)
            delete_log_file(log_file_path)
        return time_value_pairs
Ejemplo n.º 24
0
def get_data():
    """Return JSON with the start/end unix times of the last system update.

    Serves from cache when possible; otherwise reads the apt history file
    backwards block by block (file_read_backwards) until a block describing
    a system update is found, caches the resulting
    {"start_time": ..., "end_time": ...} JSON and returns it. An error JSON
    is returned when no update block exists.
    """
    cached = check_cache()
    if cached is not None:
        return cached
    time.sleep(5)
    update_time = None
    with FileReadBackwards(file_name, 'utf-8') as file:
        block = read_block(file)
        while block:
            update_time = process_block(block)
            if update_time is not None:
                break
            block = read_block(file)
    if update_time is None:
        return json.dumps({'error': 'can not get information about last system update'})
    result = json.dumps({'start_time': update_time['start'],
                         'end_time': update_time['end']})
    update_cache(result)
    return result
Ejemplo n.º 25
0
 def __init__(self):
     """Initialise the Antenna singleton from the last log line.

         position: degrees of antenna's base rotated by motor
         counter_for_overlap: counter to check for overlap
         overlap_thress: maximum degrees that antenna is able to rotate = 360 + overlap
     """
     if Antenna.__instance is not None:
         raise Exception("This class is a singleton!")
     else:
         try:
             self.paths = paths.Paths()
             # NOTE(review): the "(unknown)" literal looks like a mangled
             # "{filename}" placeholder — the 'filename' kwarg is otherwise
             # unused; confirm the intended path.
             file_name = "{dir}/(unknown)".format(dir="Logs",
                                                   filename='adcs.log')
             with FileReadBackwards(file_name,
                                    encoding="utf-8") as log_file:
                 for line in log_file:
                     # Bug fix: lines are already str (the reader decodes
                     # with the given encoding); the original called
                     # .decode() on them, raised AttributeError and always
                     # fell through to the defaults below.
                     fields = line.split(',')
                     position = fields[0]
                     counter = fields[1]
                     theta_antenna_pointing = fields[2]
                     theta = fields[3]
                     break
         except Exception:
             #@TODO change the default init position and counter
             position = 0
             counter = 0
             theta_antenna_pointing = 0
             theta = 0
         self.position = position
         self.counter_for_overlap = counter
         self.theta = theta
         self.theta_antenna_pointing = theta_antenna_pointing
         self.overlap_thress = 380
         self.sign_for_counter = +1
         self.angle_plot = 0
         Antenna.__instance = self
Ejemplo n.º 26
0
def read_log():
    """Return {"data": {"timestamp": ...}} for the last apt upgrade, or None.

    Scans /var/log/apt/history.log backwards. An upgrade command line seen
    before any End-Date (reading backwards: an upgrade still in progress)
    is reported with the current time instead.
    """
    last_dt = None
    with FileReadBackwards('/var/log/apt/history.log',
                           encoding="utf-8") as frb:
        for line in frb:
            if line.startswith("End-Date"):
                last_dt = datetime.strptime(line[10:], "%Y-%m-%d  %H:%M:%S")
            if line.endswith(("apt-get upgrade", "apt-get dist-upgrade")):
                if last_dt:
                    stamp = last_dt.timestamp()
                else:
                    stamp = datetime.timestamp(datetime.now())
                return {"data": {"timestamp": int(stamp)}}
    return None
Ejemplo n.º 27
0
def tail(filename, pattern, maxlines=60):
    """Poll a miner log until the 60s average hashrate appears.

    Re-reads the last *maxlines* lines of *filename* every 10 seconds until
    a 'Totals' line matches *pattern*; triggers a restart after ~31 failed
    attempts and keeps waiting.

    :param filename: log file to watch
    :param pattern: regex with one capture group for the hashrate
    :param maxlines: how many lines from the end of the file to inspect
    :return: the captured hashrate string
    """
    global restartreason
    waitcount = 0
    while True:
        with FileReadBackwards(filename, encoding="utf-8") as frb:
            lines = 0
            for l in frb:
                lines += 1
                # Don't scan old logs: stop once we are too deep. Bug fix:
                # the original `continue`d here, pointlessly iterating the
                # whole remaining file on every poll.
                if lines > maxlines:
                    break
                if 'Totals' in l:
                    hashrate = re.search(pattern, l)
                    if hashrate:
                        return hashrate.group(1)
        print(bcolors.WARNING +
              "No 60s hash found yet. Waiting for that to appear.. ({})".
              format(waitcount) + bcolors.ENDC)
        waitcount += 1
        if waitcount > 31:
            print(bcolors.FAIL +
                  "Waited too long for an average hash! Kill it all." +
                  bcolors.ENDC)
            # NOTE(review): `now` is not defined in this function — confirm
            # it is a module-level global, otherwise this raises NameError.
            restartreason += "{} - Timeout waiting for 60s Hash".format(now)
            restarttime()
            waitcount = 0
        time.sleep(10)
Ejemplo n.º 28
0
 def lines(self):
     """
     :return: Generator of metrics.log lines (each file read backwards)
     """
     for log_path in self.filenames():
         with FileReadBackwards(log_path, encoding="utf-8") as reader:
             yield from reader
Ejemplo n.º 29
0
def searchOutput(path):
    """Find the last 'save .../photos/...' line in condor.out.

    :param path: directory containing condor.out
    :return: the 4th-from-last and last '/'-or-space-separated tokens of
             the matching line
    :raises ValueError: if no matching line exists (the original crashed
                        with UnboundLocalError on `words` instead)
    """
    words = None
    with FileReadBackwards(path + '/condor.out') as file:
        for line in file:
            # startswith accepts a tuple of prefixes
            if line.startswith(('save ' + path + '/photos/', 'save photos/')):
                words = re.split('/|, | ', line)
                break
    if words is None:
        raise ValueError('no "save .../photos/" line found in condor.out')
    return words[-4], words[-1]
Ejemplo n.º 30
0
def get_log(file):
    """Return up to the last 1000 log lines joined with <br/>, newest first.

    Stops early at the first empty line returned by the backwards reader.
    """
    collected = []
    with FileReadBackwards(file, encoding="utf-8") as reader:
        while len(collected) < 1000 and (entry := reader.readline()):
            collected.append(entry)
    return "<br/>".join(collected)