Exemplo n.º 1
0
def dologs():
    """Process the last weeks worth of logs, build a digested version"""
    # Returns the digest as one CSV-ish string (header, one row per request,
    # then summary counters).
    # format: time, type, version, client, id, lat, long
    # where
    #    time = time of the call
    #    type = init (new), revisit (init with prev), travel (a travel time request)
    #    version = client version of nextquery
    #    id = a unique id (for tracking multiple use by clients, without recording ip address, etc.)
    #    lat, long = only present if this is a travel request.
    # NOTE(review): totimestamp, getId, ipmap and logservice are module-level
    # names defined elsewhere in this file -- verify before reuse.
    weekago = totimestamp(datetime.now() - timedelta(days=7))
    countaccess = 0   # total relevant requests seen
    counterror = 0    # requests that logged at WARNING or above
    countnew = 0      # first-time "init" requests (no prior-visit parameter)
    output = StringIO.StringIO()  # Python 2 StringIO; report built in memory

    # Header row; the explicit \n plus print's own newline leaves a blank line.
    print >> output, "Time,Type,NFVersion,ID,Lat,Long\n"

    for record in logservice.fetch(start_time=weekago):
        # Expected URL shape: /<calltype>[/<version>[/<param>]]
        urlbits = record.resource.split('/')

        if len(urlbits) < 2:
            continue

        calltype = urlbits[1]
        version = ("" if len(urlbits)<3 else urlbits[2])
        param = ("" if len(urlbits)<4 else urlbits[3])

        if calltype in ("_ah", "tasks"):
            continue # we don't care about these

        # An "init" carrying a parameter is a returning client.
        if calltype == "init" and param != "":
            calltype = "revisit"

        if calltype == "init":
            countnew+=1
        countaccess+=1

        outputtime = datetime.fromtimestamp(record.start_time).strftime("%Y-%m-%d %X %a")
        # Trailing comma keeps the row open; the lat/long cells follow below.
        print >>output, outputtime, ",", calltype, ",",version, ",", getId(record.ip),
        if calltype == "traveltimes":
            print >>output, ",", param
        else:
            print >>output, ",,"


    # Second pass over the same window: count requests that logged WARNING+.
    for record in logservice.fetch(start_time=weekago,minimum_log_level=logservice.LOG_LEVEL_WARNING):
        urlbits = record.resource.split('/')

        if urlbits[1] in ("init","traveltimes","tasks"):
            counterror+=1

    print >> output, "\nAccesses: ", countaccess
    # NOTE(review): ipmap is presumably populated by getId() as a side
    # effect -- confirm against that helper.
    print >> output, "Unique: ", len(ipmap.keys())
    print >> output, "New: ", countnew
    print >> output, "Errors: ", counterror

    return output.getvalue()
Exemplo n.º 2
0
def getapplogs(start_offset, how_many):
    
    end_time = time.time()
    i=0
    s=''
    end_offset=start_offset+how_many
    for req_log in logservice.fetch(end_time=end_time, offset=None,
                                    minimum_log_level=logservice.LOG_LEVEL_INFO,
                                    include_app_logs=True):
        
        for app_log in req_log.app_logs:
            if i in range(start_offset,end_offset):
                lmessage=app_log.message
                if (True == lmessage.startswith('%lg%')):
                    lmessage=re.sub('%lg%','',lmessage)
                    s=("""%s
%s""" %(lmessage,s))
            elif (i>end_offset):
                break
            i+=1
    
    if (''==s ):
        print 'No logging message specified<br />'
    
    return s
Exemplo n.º 3
0
def getapplogs(start_offset, how_many):

    end_time = time.time()
    i = 0
    s = ''
    end_offset = start_offset + how_many
    for req_log in logservice.fetch(
            end_time=end_time,
            offset=None,
            minimum_log_level=logservice.LOG_LEVEL_INFO,
            include_app_logs=True):

        for app_log in req_log.app_logs:
            if i in range(start_offset, end_offset):
                lmessage = app_log.message
                if (True == lmessage.startswith('%lg%')):
                    lmessage = re.sub('%lg%', '', lmessage)
                    s = ("""%s
%s""" % (lmessage, s))
            elif (i > end_offset):
                break
            i += 1

    if ('' == s):
        print 'No logging message specified<br />'

    return s
Exemplo n.º 4
0
def get_logs(log_ids, log_level):
    """Yield one dict per requested log record.

    Each dict carries every public property of the RequestLog, with
    start_time/end_time converted to datetimes, a computed duration when
    the request finished, and app logs filtered to the given level.
    """
    fetched = logservice.fetch(minimum_log_level=log_level,
                               include_incomplete=True,
                               include_app_logs=True,
                               request_ids=log_ids)
    for req in fetched:
        record = {}
        # Harvest every public property exposed by the RequestLog class.
        for attr, member in req.__class__.__dict__.iteritems():
            if isinstance(member, property) and not attr.startswith('_'):
                record[attr] = getattr(req, attr)
        record['start_time'] = datetime.datetime.fromtimestamp(req.start_time)
        if req.end_time:
            record['end_time'] = datetime.datetime.fromtimestamp(req.end_time)
            elapsed = record['end_time'] - record['start_time']
            record['duration'] = elapsed.total_seconds()

        # Replace the raw AppLog objects with plain dicts at or above level.
        filtered = []
        for entry in record['app_logs']:
            if entry.level >= log_level:
                filtered.append({
                    'time': datetime.datetime.fromtimestamp(entry.time),
                    'level': entry.level,
                    'message': entry.message,
                })
        record['app_logs'] = filtered
        yield record
Exemplo n.º 5
0
 def do(self, proc):
       """Replay request logs, serving '/celtic.png' hits and logging others.

       NOTE(review): count/show_next/last_offset/i/ip/rq/meth are computed
       but never used below -- looks like leftover pagination scaffolding.
       """
       proc = str( proc )
       try:
         # Optional paging offset from the query string (base64-encoded).
         offset    = self.request.get('offset') or None
         if offset:
            offset = urlsafe_b64decode(str(offset))
       except:
         # Bad or missing offset: start from the newest logs.
         offset    = None
       end_time    = time()
       count       = 5
       show_next   = False
       last_offset = None
       i           = 1
       for req_log in logservice.fetch(end_time=end_time, offset=offset, 
       minimum_log_level=logservice.LOG_LEVEL_INFO, include_app_logs=True):
         ip        = str( req_log.ip )
         rq        = str( req_log )
         meth      = str( req_log.method )
         rec       = str( req_log.resource )
         if rec   == '/celtic.png':
            self.response.headers['Content-Type'] = 'image/png'
            # NOTE(review): file is opened but never closed, and the local
            # filename 'celtik.png' differs from the requested
            # '/celtic.png' -- confirm whether that spelling is intended.
            a      = open( 'celtik.png', 'rb' )
            for e in a:
               self.response.out.write( e )
         if not rec == '/celtic.png':
            logging.info( 'Bad Request:  %s %s' % ( proc, rec ) )
Exemplo n.º 6
0
    def get(self):
        """Scan the last six hours of ERROR-level request logs and, when any
        are found, e-mail an HTML report; always echo the report (possibly
        empty) in the response."""
        now = time.time()
        window_start = now - 6 * 60 * 60  # 6 hours before now- same as cronjob interval.
        pieces = []
        for req_log in logservice.fetch(start_time=window_start, end_time=now,
                                        minimum_log_level=logservice.LOG_LEVEL_ERROR, include_app_logs=True):
            pieces.append('%s - %s <br/>\n' % (req_log.resource, req_log.method))
            pieces.append('%s <br/>\n' % req_log.ip)
            pieces.append('<br/>\n')

            for app_log in req_log.app_logs:
                pieces.append('Date: %s' % datetime.datetime.fromtimestamp(app_log.time).strftime('%D %T UTC'))
                pieces.append('<b><pre>%s</pre></b><br/>\n' % app_log.message)

            pieces.append('<hr>\n')

        html = ''.join(pieces)
        # A non-empty pieces list means at least one error log was seen.
        if pieces:
            mail.send_mail(sender="*****@*****.**",
                           to='*****@*****.**',
                           subject='Whaler Errors',
                           body=html,
                           html=html)
        self.response.out.write(html)
    def get(self):
        """Page through recent request logs, rendering request and app-log
        details as HTML, five requests at a time.

        The 'offset' query parameter (urlsafe base64) resumes the scan where
        the previous page stopped; a "Next" link is emitted when more logs
        remain.
        """
        logging.info('Starting Main handler')
        # Get the incoming offset param from the Next link to advance through
        # the logs. (The first time the page is loaded, there won't be any offset.)
        try:
            offset = self.request.get('offset') or None
            if offset:
                offset = base64.urlsafe_b64decode(str(offset))
        except TypeError:
            offset = None

        # Logs are read backwards from this end time.
        end_time = time.time()

        # Count specifies the max number of RequestLogs shown at one time.
        # Use a boolean to initially turn off visiblity of the "Next" link.
        count = 5
        show_next = False
        last_offset = None

        # Iterate through all the RequestLog objects, displaying some fields and
        # iterate through all AppLogs beloging to each RequestLog count times.
        # In each iteration, save the offset to last_offset; the last one when
        # count is reached will be used for the link.
        i = 0
        for req_log in logservice.fetch(end_time=end_time, offset=offset,
                                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                                        include_app_logs=True):
            self.response.out.write('<br /> REQUEST LOG <br />')
            self.response.out.write(
                'IP: %s <br /> Method: %s <br /> Resource: %s <br />' %
                (req_log.ip, req_log.method, req_log.resource))
            self.response.out.write(
                'Date: %s<br />' %
                str(req_log.end_time))

            last_offset = req_log.offset
            i += 1

            for app_log in req_log.app_logs:
                self.response.out.write('<br />APP LOG<br />')
                # BUG FIX: previously printed req_log.end_time here, so every
                # app log showed the request's end time instead of its own
                # timestamp (the original commented-out line shows the intent
                # was app_log.time).
                self.response.out.write(
                    'Date: %s<br />' %
                    str(app_log.time))
                self.response.out.write('<br />Message: %s<br />' % app_log.message)

            if i >= count:
                show_next = True
                break

        # Prepare the offset URL parameters, if any.
        if show_next:
            query = self.request.GET
            query['offset'] = base64.urlsafe_b64encode(last_offset)
            next_link = urllib.urlencode(query)
            self.response.out.write('<a href="/rest/logHandler?%s">Next</a>' % next_link)
Exemplo n.º 8
0
def do_not_run_test_logservice_fetch(request_id):
    """This test fails at logservice.fetch"""
    logging.info('TESTING')
    # Gather every app-log entry for the request whose message is 'TESTING';
    # the assertion fires (as the original did) when none is found.
    matches = [entry
               for record in logservice.fetch(request_ids=[request_id],
                                              include_app_logs=True)
               for entry in record.app_logs
               if entry.message == 'TESTING']
    assert matches
Exemplo n.º 9
0
def get_logs(offset=None):
    """Fetch INFO-and-above request logs (with app logs) starting from the
    given offset."""
    # Logs are read backwards from the given end time, so passing "now"
    # means: everything up until this moment.
    return logservice.fetch(end_time=time.time(),
                            offset=offset,
                            minimum_log_level=logservice.LOG_LEVEL_INFO,
                            include_app_logs=True)
Exemplo n.º 10
0
def get_logs(offset=None):
    """Return an iterator over INFO+ request logs up to the present moment."""
    # fetch reads backwards from end_time; "now" therefore covers all logs
    # written so far.
    fetch_args = dict(
        end_time=time.time(),
        offset=offset,
        minimum_log_level=logservice.LOG_LEVEL_INFO,
        include_app_logs=True)
    return logservice.fetch(**fetch_args)
Exemplo n.º 11
0
 def get_logs(offset=None, log_level=1):
     """Fetch request logs up to the current time.

     log_level indexes LOG_LEVELS (0=DEBUG .. 4=CRITICAL); the default of 1
     selects INFO and above.
     """
     LOG_LEVELS = [
         logservice.LOG_LEVEL_DEBUG, logservice.LOG_LEVEL_INFO,
         logservice.LOG_LEVEL_WARNING, logservice.LOG_LEVEL_ERROR,
         logservice.LOG_LEVEL_CRITICAL
     ]
     return logservice.fetch(end_time=time.time(),
                             offset=offset,
                             minimum_log_level=LOG_LEVELS[log_level],
                             include_app_logs=True)
Exemplo n.º 12
0
def countapplogs():
    """Count app-log messages tagged with the '%lg%' marker across all
    INFO+ request logs up to now."""
    tagged = 0
    for request_log in logservice.fetch(end_time=time.time(), offset=None,
                                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                                        include_app_logs=True):
        tagged += sum(1 for entry in request_log.app_logs
                      if entry.message.startswith('%lg%'))
    return tagged
Exemplo n.º 13
0
def countapplogs():
    """Return how many app-log entries start with the '%lg%' marker."""
    request_logs = logservice.fetch(
        end_time=time.time(),
        offset=None,
        minimum_log_level=logservice.LOG_LEVEL_INFO,
        include_app_logs=True)
    # One pass over every app log of every request log.
    return sum(1
               for record in request_logs
               for entry in record.app_logs
               if entry.message.startswith('%lg%'))
Exemplo n.º 14
0
def readclienthistory(stream):
    """Write client access data from logs onto *stream*.

    Row format: time, req-type, ip, lat, long
    where req-type is 1=new_init, 2=re_init, 3=travel_time, and lat/long
    are empty unless the record is a travel request.
    """
    # Compile once; these patterns run against every log record.
    revisit_re = re.compile(r'/[0-9]')
    coords_re = re.compile(r'/([0-9.-]+),([0-9.-]+)')
    for record in logservice.fetch():
        request_type = record.resource  # URL string

        if '/init' in request_type:
            # A digit path segment marks a returning client (re_init).
            isnew = (1 if revisit_re.search(request_type) is None else 2)
            stream.write("%d, %d, %s,,\n" % (record.start_time, isnew, record.ip))
        elif '/travel' in request_type:
            mob = coords_re.search(request_type)
            (l1, l2) = ((mob.group(1), mob.group(2)) if mob is not None else ('', ''))
            stream.write("%d, 3, %s, %s, %s\n" % (record.start_time, record.ip, l1, l2))
        else:
            pass  # ignore all other message types
Exemplo n.º 15
0
 def _get_log_messages(self, offset, count,
                       min_level=logservice.LOG_LEVEL_DEBUG):
   """Collect up to *count* request logs newer than *offset*, annotating
   each log (and its app logs) with datetime timestamps and level names."""
   collected = []
   taken = 0
   for entry in logservice.fetch(end_time=time.time(), offset=None,
                                 minimum_log_level=min_level,
                                 include_app_logs=True):
     # Stop once we reach the log the caller already has.
     if entry.offset == offset:
       break
     entry.timestamp = datetime.datetime.fromtimestamp(entry.start_time)
     for line in entry.app_logs:
       line.timestamp = datetime.datetime.fromtimestamp(line.time)
       line.level_name = LOG_LEVELS[line.level]
     collected.append(entry)
     taken += 1
     if taken >= count:
       break
   return collected
Exemplo n.º 16
0
	def get(self):
		"""Render app-log messages from the last 24 hours (admin only).

		NOTE(review): only messages starting with "2013" are shown -- this
		hard-coded year prefix presumably matches the app's log format and
		will silently drop everything in other years; confirm.
		"""
		if not isUserAdmin(self):
			# Not an admin: remember where to return to after login, go home.
			self.session[LOGIN_NEXT_PAGE_KEY] = self.URL
			self.redirect("/")
			return
		now = int(time.mktime(time.gmtime()))
		# Fetch 24 hours of logs
		start = now - 3600 * 24
		logs = fetch(start_time=start, end_time=now, minimum_log_level=logservice.LOG_LEVEL_INFO, include_incomplete=False, include_app_logs=True)
		messages = []
		for log in logs:
			for appLog in log.app_logs:
				# Strip a possible UTF-8 BOM before decoding (Python 2 str).
				message = unicode(appLog.message.strip(codecs.BOM_UTF8), 'utf-8')
				if message.startswith("2013"):
					messages.append(message)
		template_values = {
			'messages':messages
		}
		template = jinja_environment.get_template('templates/log/logLines.html')
		self.printPage("Logs", template.render(template_values), False, False)
Exemplo n.º 17
0
def get_logs(log_ids, log_level):
    """Yield a dict of public properties for each requested log record,
    with parsed timestamps, a duration for finished requests, and app logs
    filtered to the given level."""
    for req in logservice.fetch(minimum_log_level=log_level,
                                include_incomplete=True,
                                include_app_logs=True,
                                request_ids=log_ids):
        info = {}
        # Copy every public property defined on the RequestLog class.
        for prop_name, prop_val in req.__class__.__dict__.iteritems():
            if not prop_name.startswith('_') and isinstance(prop_val, property):
                info[prop_name] = getattr(req, prop_name)
        info['start_time'] = datetime.datetime.fromtimestamp(req.start_time)
        if req.end_time:
            info['end_time'] = datetime.datetime.fromtimestamp(req.end_time)
            delta = info['end_time'] - info['start_time']
            info['duration'] = delta.total_seconds()

        info['app_logs'] = [
            {'time': datetime.datetime.fromtimestamp(al.time),
             'level': al.level,
             'message': al.message}
            for al in info['app_logs'] if al.level >= log_level
        ]
        yield info
Exemplo n.º 18
0
    def get(self):
        """Dump recent request logs as JSON.

        Query parameters:
          end_date / start_date -- YYYYMMDD dates (override the *_time params)
          end_time / start_time -- raw epoch seconds
          count                 -- maximum number of records to return
        """
        end_date = self.request.get('end_date', None)
        end_time = self.request.get('end_time', None)

        start_date = self.request.get('start_date', None)
        start_time = self.request.get('start_time', None)
        count = self.request.get('count', None)
        offset = None

        end_time = time.time() if end_time is None else float(end_time)
        # BUG FIX: start_time arrived as a string and was previously compared
        # against the float log_record.start_time without conversion, which
        # under Python 2 silently made the comparison meaningless.
        start_time = None if start_time is None else float(start_time)
        count = None if count is None else int(count)

        if end_date:
            end_time = time.mktime(time.strptime(end_date, "%Y%m%d"))

        if start_date:
            start_time = time.mktime(time.strptime(start_date, "%Y%m%d"))

        class MagicEncoder(json.JSONEncoder):
            """Serializes logservice objects json does not know about."""
            def default(self, obj):
                # Types whose __dict__ is directly serializable.
                use_dict = [logservice.AppLog, logservice.log_service_pb.RequestLog,
                            logservice.log_service_pb.LogLine, logservice.log_service_pb.LogOffset]
                # RequestLog wrappers: pull a fixed attribute set instead.
                skip = [logservice.RequestLog]
                if any(isinstance(obj, x) for x in use_dict):
                    return obj.__dict__
                elif any(isinstance(obj, x) for x in skip):
                    o = {}
                    attributes = [
                        'api_mcycles',
                        'app_engine_release',
                        'app_id',
                        'app_logs',
                        'combined',
                        'cost',
                        'end_time',
                        'finished',
                        'host',
                        'http_version',
                        'instance_key',
                        'ip',
                        'latency',
                        'mcycles',
                        'method',
                        'module_id',
                        'nickname',
                        'offset',
                        'pending_time',
                        'referrer',
                        'replica_index',
                        'request_id',
                        'resource',
                        'response_size',
                        'start_time',
                        'status',
                        'task_name',
                        'task_queue_name',
                        'url_map_entry',
                        'user_agent',
                        'version_id',
                        'was_loading_request']

                    for a in attributes:
                        f = getattr(obj, a)
                        if f:
                            # Some members are methods; call for the value.
                            if callable(f):
                                o[a] = f()
                            else:
                                o[a] = f
                    return o
                elif 'Lock' in str(type(obj)):
                    return {}  # thread locks cannot be serialized; drop them
                else:
                    return json.JSONEncoder.default(self, obj)

        #wish json could stream, till then this will hog RAM
        log_records = []
        log_records_gen = logservice.fetch(end_time=end_time,
                                           offset=offset,
                                           minimum_log_level=logservice.LOG_LEVEL_INFO,
                                           include_app_logs=True)

        for log_record in log_records_gen:
            if start_time and log_record.start_time < start_time:
                continue

            log_records.append(log_record)

            # BUG FIX: the counter was checked before being incremented, so
            # one extra record beyond 'count' was returned.
            if count and len(log_records) >= count:
                break

        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(json.dumps(log_records, indent=True, cls=MagicEncoder))
Exemplo n.º 19
0
def index():
    """Render hello.html with the ten most recent (possibly incomplete)
    INFO+ request logs."""
    fetched = logservice.fetch(minimum_log_level=logservice.LOG_LEVEL_INFO,
                include_app_logs=True, include_incomplete=True)
    # Materialize just the first ten records from the lazy iterator.
    first_ten = list(itertools.islice(fetched, 0, 10))
    return render_template('hello.html', req_logs=first_ten)
Exemplo n.º 20
0
    def fetch_logs(self,
                   time_period,
                   save_to_file=False,
                   send_to_es=False,
                   send_to_udp=False):
        """Fetch GAE request logs for *time_period* and dispatch them.

        The period is split into intervals; each interval's logs are
        serialized via _prepare_json and sent to Elasticsearch or Redis,
        and optionally appended to a per-day local file.

        Args:
            time_period: dict with 'start', 'end', 'start_human', 'end_human'.
            save_to_file: directory to append per-day log files to, or False.
            send_to_es: route lines to Elasticsearch instead of Redis.
            send_to_udp: currently unused (the UDP transport was disabled).
        """
        logger.info("Successfully authenticated")

        logger.info("Fetching logs from %s to %s (GAE TZ)" %
                    (time_period['start_human'], time_period['end_human']))

        end = time_period['end']
        start = time_period['start']
        start_human = time_period['start_human']

        intervals = _split_time_period(start, end)

        i = 0  # total request logs fetched, across all intervals

        for interval in intervals:
            try:
                start, end, start_human = interval
                index_name = start_human.strftime('%Y.%m.%d')
                # NOTE(review): module-level app_name here vs self.app_name
                # below -- confirm which one is intended.
                dest = '%s-%s.log' % (app_name,
                                      start_human.strftime('%Y-%m-%d'))

                logger.info("Interval : %s - %s %s" %
                            (start, end, start_human))

                # Persist where we are so a crash can resume this interval.
                save_recovery_log(start)
                lines = []
                offset = None
                remote_api_stub.ConfigureRemoteApiForOAuth(
                    self.app_name + '.appspot.com', '/remote_api')

                for req_log in logservice.fetch(
                        end_time=end,
                        start_time=start,
                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                        version_ids=self.version_ids,
                        include_app_logs=True,
                        include_incomplete=True,
                        offset=offset):

                    logger.debug("Retrieved - %s" % req_log.combined)

                    lines.append(self._prepare_json(dest, req_log))

                    i += 1
                    if i % 100 == 0:
                        logger.info("Fetched %d req logs so far" % i)

                if lines:
                    if send_to_es:
                        self.redis_transports.send_to_es(
                            index_name, dest, lines)
                    else:
                        self.redis_transports.callback(dest, lines)

                if save_to_file:
                    # Context manager ensures the file closes even on error
                    # (the original file() handle leaked if write() raised).
                    with open(os.path.join(save_to_file, dest), 'a') as out:
                        out.write('\n'.join([x['line'] for x in lines]))

                if send_to_es:
                    logger.info("Save to es %s", len(lines))
                else:
                    logger.info("Save to redis %s", len(lines))
            except KeyboardInterrupt:
                return
            except Exception:
                # BUG FIX: was a bare except, which also swallowed
                # SystemExit; log the interval failure and move on.
                logger.error("Something went wrong", exc_info=True)
                continue

        logger.info("Retrieved %d logs. Done." % i)

        return ""
Exemplo n.º 21
0
  def get(self):
    """Render one calendar month of request logs, optionally filtered by
    referrer substring, with a "Next" link that pages one month backwards.

    NOTE(review): show_next starts True and last_offset starts as the int
    5000, so when no logs match at all the Next link is still emitted and
    urlsafe_b64encode(5000) would fail -- confirm intended behavior.
    """
    logging.info('Starting Main handler')
    # Get the incoming offset param from the Next link to advance through
    # the logs. (The first time the page is loaded, there won't be any offset.)
    start_time_set=False
    try:
      offset = self.request.get('offset') or None
      if offset:
        offset = base64.urlsafe_b64decode(str(offset))
    except TypeError:
      offset = None
    try:
      # start_time arrives base64-encoded; presence marks a "paged" request.
      start_time = self.request.get('start_time') or None
      if start_time:
        start_time = float(base64.urlsafe_b64decode(str(start_time)))
        start_time_set=True
    except TypeError:
      start_time = None
      start_time_set=False
    try:
      # Optional referrer-substring filter (shadows the builtin 'filter').
      filter = str(self.request.get('filter')) or None
    except TypeError:
      filter = None

    # Set up end time for our query.

    # Count specifies the max number of RequestLogs shown at one time.
    # Use a boolean to initially turn off visiblity of the "Next" link.
    count = 1000
    show_next = True
    last_offset = 5000

    # First day of the current month, as a struct_time.
    dt=datetime.datetime.now()
    tt=dt.timetuple()
    year=tt[0]
    month=tt[1]
    ttt=time.strptime((("01 %s %s") % (month,year)), "%d %m %Y")

    if not start_time_set:
      # First page: current month-to-date.
      end_time = time.time()
      start_time = time.mktime(ttt)
    else:
      # Paged request: the window ends at the first day of the month
      # following the supplied start_time.
      dt2=datetime.datetime.utcfromtimestamp(float(start_time))
      tt2=dt2.timetuple()
      year2=tt2[0]
      month2=tt2[1]
      month2=month2+1
      if month2==13:
        month2=1
        year2=year2+1
      ttt2=time.strptime((("01 %s %s") % (month2,year2)), "%d %m %Y")
      end_time=time.mktime(ttt2)
    # Pre-compute the start of the previous month for the "Next" link
    # (paging moves backwards in time, one month per click).
    dt3=datetime.datetime.utcfromtimestamp(float(start_time))
    tt3=dt3.timetuple()
    year3=tt3[0]
    month3=tt3[1]
    month3=month3-1
    if month3==0:
      month3=12
      year3=year3-1
    ttt3=time.strptime((("01 %s %s") % (month3,year3)), "%d %m %Y")
    start_time_next=time.mktime(ttt3)
    # Iterate through all the RequestLog objects, displaying some fields and
    # iterate through all AppLogs beloging to each RequestLog count times.
    # In each iteration, save the offset to last_offset; the last one when
    # count is reached will be used for the link.
    i = 0
    for req_log in logservice.fetch(start_time=start_time,end_time=end_time, offset=offset,
                                    minimum_log_level=logservice.LOG_LEVEL_INFO,
                                    include_app_logs=False):
      referrer=req_log.referrer
      ip=req_log.ip
      # Emit the row if the referrer matches the filter, or no filter given.
      if filter and referrer and not string.find(referrer, filter) == -1:
#      self.response.out.write("<br /> REQUEST LOG <br />")
#      self.respons
        self.response.out.write("""<a href=\"/logs3?filter=%s\">IP</a>  %s <br />""" % (ip, req_log.combined))
        i += 1
      else:
        if not filter:
          self.response.out.write("""<a href=\"/logs3?filter=%s\">IP</a>  %s <br />""" % (ip, req_log.combined))
          i += 1
#      self.response.out.write("""IP: %s <br /> Method: %s <br />
#                       Resource: %s <br />""" % (req_log.ip,
#                       req_log.method, req_log.resource))
#      self.response.out.write("Date: "+datetime.datetime.fromtimestamp(req_log.end_time).strftime('%D %T UTC') +"<br />")

      last_offset= req_log.offset


      # NOTE(review): fetch was called with include_app_logs=False, so this
      # inner loop presumably never runs -- confirm.
      for app_log in req_log.app_logs:
        self.response.out.write("<br />APP LOG<br />")
        self.response.out.write("Date: "+datetime.datetime.fromtimestamp(app_log.time).strftime('%Y-%m-%d %H:%M:%S UTC') +"<br />")
        self.response.out.write("<br />Message: "+app_log.message+"<br />")

      if i >= count:
        show_next = True
        break

    # Prepare the offset URL parameters, if any.
    if show_next:
      query = self.request.GET
      query['offset'] = base64.urlsafe_b64encode(last_offset)
      query['start_time'] = base64.urlsafe_b64encode(("%s")%(start_time_next))
      next_link = urllib.urlencode(query)
      self.response.out.write("<a href=\"/logs2?"+next_link+"\">Next</a>")
      self.response.out.write("<br />")
Exemplo n.º 22
0
def _offload_logs(offload_run_key):
    """Stream '%OFFLOAD%'-style tagged app logs into per-day GCS files.

    Scans request logs backwards from the newest, buffers matching app-log
    payloads per calendar day, and hands (date, buffer, offset) packets to a
    writer thread that appends them to GCS and checkpoints progress in the
    datastore. Stops when it reaches the request id recorded by the previous
    run, when the buffer grows too large... (see inline notes). If the scan
    did not finish within the deadline, the job reschedules itself.
    """
    to_be_saved = list()   # datastore entities to persist with the next write
    write_queue = Queue()  # packets handed to the writer thread; None = stop
    def writer():
        # Writer thread: serializes all GCS writes and datastore checkpoints
        # so the fetch loop never blocks on I/O.
        current_date = None
        gcs_fh = None
        while True:
            packet = write_queue.get()
            if packet is None:
                if gcs_fh:
                    gcs_fh.close()
                return  # Exit thread
            date, buf, offset, to_be_saved = packet
            # Rotate the GCS file when the day changes.
            if current_date is None or current_date != date:
                if gcs_fh:
                    gcs_fh.close()
                current_date = date
                gcs_fh = _get_bucket_file_handle(date)
            gcs_fh.write(buf.getvalue())
            # Checkpoint the fetch offset so a rerun resumes here.
            if offset is not None:
                offload_run = db.get(offload_run_key)
                offload_run.offset = offset
                to_be_saved.append(offload_run)
            if to_be_saved:
                db.put(to_be_saved)
    done = False
    offload_run = db.get(offload_run_key)
    offset = offload_run.offset  # None means this is a fresh run
    thread = threading.Thread(target=writer)
    thread.daemon = True
    thread.start()
    try:
        buf_date = None      # calendar day the current buffer belongs to
        buf = StringIO()
        start = now()
        for requestLog in logservice.fetch(offset=offset, minimum_log_level=logservice.LOG_LEVEL_INFO,
                                           include_incomplete=False, include_app_logs=True):
            if offset is None:
                # This is the first request
                # ==> Store the request id
                offset_settings = OffloadSettings.get_instance()
                offset_settings.until_request_id = requestLog.request_id
                to_be_saved.append(offset_settings)
            elif requestLog.request_id == offload_run.until_request_id:
                # Reached the request already handled by the previous run.
                if buf_date:
                    write_queue.put((buf_date, buf, None, to_be_saved))
                    to_be_saved = list()
                    db.delete(offload_run_key)  # This job is done
                done = True
                break
            offset = requestLog.offset
            date = datetime.datetime.fromtimestamp(requestLog.start_time).date()
            if not buf_date:
                buf_date = date
            elif date != buf_date:
                # Day boundary crossed: flush the old day's buffer.
                if buf.tell() > 0:
                    write_queue.put((buf_date, buf, offset, to_be_saved))
                    to_be_saved = list()
                    buf = StringIO()
                buf_date = date
            elif buf.tell() > MAX_BUF_SIZE:
                write_queue.put((buf_date, buf, offset, to_be_saved))
                to_be_saved = list()
                buf = StringIO()
            elif now() - start > 9 * 60:  # Deffered deadline = 10 minutes
                # Out of time: flush what we have and reschedule below.
                write_queue.put((buf_date, buf, offset, to_be_saved))
                break
            for appLog in requestLog.app_logs:
                if appLog.message and appLog.message.startswith(OFFLOAD_HEADER):
                    buf.write(appLog.message[OFFLOAD_HEADER_LENGTH + 1:])
                    buf.write("\n")
        else:
            # for/else: the fetch iterator was exhausted without break.
            if buf.tell() > 0:
                write_queue.put((buf_date, buf, None, to_be_saved))
            done = True
    finally:
        write_queue.put(None)  # Exit writer thread
        thread.join()
    if not done:
        # NOTE(review): calls offload_logs (no underscore) -- presumably a
        # deferred wrapper around this function; confirm it exists.
        offload_logs(offload_run_key)
Exemplo n.º 23
0
def fetch_logs(time_period,
               recovery_log,
               username,
               password,
               app_name,
               version_ids,
               offset=None,
               dest="/tmp/gae_log.log",
               append=False):
    """Download GAE request logs for *time_period* and write them to *dest*.

    Each request log is serialized with ``_prepare_json`` and written as one
    line of *dest*.  Progress is tracked in the module globals
    ``last_time_period`` / ``last_offset`` so an interrupted run can resume.

    Args:
        time_period: dict with 'start' / 'end' timestamps plus the matching
            'start_human' / 'end_human' display strings.
        recovery_log: unused here; kept for interface compatibility.
        username, password: credentials for the remote API.
        app_name: GAE application id.
        version_ids: app version(s) whose logs are fetched.
        offset: opaque logservice offset to resume from, or None.
        dest: path of the output file.
        append: open *dest* in append mode instead of truncating it.

    Returns:
        "" (the useful output is the file and the recovery globals).
    """
    # Distinct name so the file handle opened below no longer shadows the
    # auth callback (the original reused ``f`` for both).
    auth_func = lambda: (username, password)

    try:
        remote_api_stub.ConfigureRemoteApi(None,
                                           REMOTE_API_PATH,
                                           auth_func,
                                           app_name,
                                           secure=True)
    except ConfigurationError:
        # Token expired? A single re-authentication attempt usually fixes it.
        logger.exception(
            "Token validation failed. Probably expired. Will retry")
        remote_api_stub.ConfigureRemoteApi(None,
                                           REMOTE_API_PATH,
                                           auth_func,
                                           app_name,
                                           secure=True)

    logger.info("Successfully authenticated")

    logger.info("Fetching logs from %s to %s (GAE TZ)" %
                (time_period['start_human'], time_period['end_human']))

    end = time_period['end']
    start = time_period['start']

    intervals = _split_time_period(start, end)

    # TODO - move to classes instead of globals
    global last_time_period
    last_time_period = time_period

    i = 0
    mode = 'a' if append else 'w'

    # ``with`` guarantees the file is closed even on KeyboardInterrupt,
    # which the previous bare ``except`` + manual close did not.
    with open(dest, mode) as log_file:
        try:
            for interval in intervals:
                start, end = interval
                logger.info("Interval : %s - %s" % (start, end))

                for req_log in logservice.fetch(
                        end_time=end,
                        start_time=start,
                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                        version_ids=version_ids,
                        include_app_logs=True,
                        include_incomplete=True,
                        offset=offset):

                    logger.debug("Retrieved - %s" % req_log.combined)

                    i += 1
                    if i % 100 == 0:
                        logger.info("Fetched %d req logs so far" % i)

                    log_file.write(_prepare_json(req_log))
                    log_file.write('\n')

                    # keeping track in case - if need to resume
                    global last_offset
                    last_offset = req_log.offset
                # end fetch
            # end interval
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; recovery info is still
            # saved for resumable failures.
            logger.exception("Something went wrong")
            save_recovery_info()

    logger.info("Retrieved %d logs. Done." % i)

    return ""
Exemplo n.º 24
0
    def fetch_logs(self, time_period, save_to_file=False, send_to_es=False, send_to_udp=False):
        """Fetch GAE request logs for *time_period* and forward them.

        For each day-sized interval the collected lines are pushed to
        Elasticsearch (``send_to_es``) or Redis (default) and optionally
        appended to a per-day file under *save_to_file*.

        Args:
            time_period: dict with 'start', 'end' and the human-readable
                'start_human' / 'end_human' forms.
            save_to_file: directory to append per-day log files into, or
                False to skip file output.
            send_to_es: route lines to Elasticsearch instead of Redis.
            send_to_udp: currently unused (UDP transport is commented out).

        Returns:
            "" once every interval has been attempted, or None on Ctrl-C.
        """
        logger.info("Successfully authenticated")

        logger.info("Fetching logs from %s to %s (GAE TZ)"
                    % (time_period['start_human'], time_period['end_human']))

        end = time_period['end']
        start = time_period['start']

        intervals = _split_time_period(start, end)

        i = 0

        for interval in intervals:
            try:
                start, end, start_human = interval
                index_name = start_human.strftime('%Y.%m.%d')
                # NOTE(review): ``app_name`` here is not ``self.app_name`` --
                # it must resolve from module scope; confirm that is intended.
                dest = '%s-%s.log' % (app_name, start_human.strftime('%Y-%m-%d'))

                logger.info("Interval : %s - %s %s" % (start, end, start_human))

                save_recovery_log(start)
                lines = []
                offset = None
                remote_api_stub.ConfigureRemoteApiForOAuth(self.app_name + '.appspot.com', '/remote_api')

                for req_log in logservice.fetch(end_time=end,
                                                start_time=start,
                                                minimum_log_level=logservice.LOG_LEVEL_INFO,
                                                version_ids=self.version_ids,
                                                include_app_logs=True, include_incomplete=True,
                                                offset=offset):

                    logger.debug("Retrieved - %s" % req_log.combined)

                    lines.append(self._prepare_json(dest, req_log))

                    i += 1
                    if i % 100 == 0:
                        logger.info("Fetched %d req logs so far" % i)

                    # end fetch
                if lines:
                    if send_to_es:
                        self.redis_transports.send_to_es(index_name, dest, lines)
                    else:
                        self.redis_transports.callback(dest, lines)

                    #if send_to_udp:
                    #    self.redis_transports.send_to_udp(dest, lines, self.udp_host, self.udp_port)

                if save_to_file:
                    # ``with open`` replaces the deprecated Py2-only ``file()``
                    # builtin and guarantees the handle is closed on error.
                    with open(os.path.join(save_to_file, dest), 'a') as out:
                        out.write('\n'.join([x['line'] for x in lines]))

                if send_to_es:
                    logger.info("Save to es %s", len(lines))
                else:
                    logger.info("Save to redis %s", len(lines))
                # end interval
            except KeyboardInterrupt:
                return
            except Exception:
                # Narrowed from a bare ``except:``; log and keep processing
                # the remaining intervals.
                logger.error("Something went wrong", exc_info=True)
                continue

        logger.info("Retrieved %d logs. Done." % i)

        return ""
Exemplo n.º 25
0
  def get(self):
    """Render recent request logs (and their app logs) as HTML.

    Pages through ``logservice.fetch`` results ``count`` entries at a time;
    the opaque fetch offset is round-tripped through the ``offset`` query
    parameter (urlsafe base64) to build a "Next" link.
    """
    logging.info('Starting Main handler')
    # Get the incoming offset param from the Next link to advance through
    # the logs. (The first time the page is loaded, there won't be any offset.)
    try:
      offset = self.request.get('offset') or None
      if offset:
        offset = base64.urlsafe_b64decode(str(offset))
    except TypeError:
      # Malformed base64 in the query string -- fall back to the first page.
      offset = None

    # Set up end time for our query.
    end_time = time.time()

    # Count specifies the max number of RequestLogs shown at one time.
    # Start with the "Next" link hidden; it is enabled only after a full
    # page of results. (Fixes the original initialization to True, which
    # showed a Next link -- and base64-encoded a None offset, raising
    # TypeError -- even when there were no results at all.)
    count = 500
    show_next = False
    last_offset = None

    # Iterate through the RequestLog objects, displaying some fields and
    # all AppLogs belonging to each, up to ``count`` request logs.
    # last_offset tracks the offset of the last record written, so the
    # Next link resumes exactly after it.
    i = 0
    for req_log in logservice.fetch(end_time=end_time, offset=offset,
                                    minimum_log_level=logservice.LOG_LEVEL_INFO,
                                    include_app_logs=True):

      self.response.out.write("<br /> REQUEST LOG <br />\n")
      self.response.out.write("Date: " + datetime.datetime.fromtimestamp(req_log.end_time).strftime('%H:%M:%S %Y-%m-%d UTC') + " <br />\n")
      self.response.out.write("""%s <br />\n""" % (req_log.combined))

      last_offset = req_log.offset
      i += 1

      for app_log in req_log.app_logs:
        self.response.out.write("APP LOG<br />\n")
        self.response.out.write("Date: "+datetime.datetime.fromtimestamp(app_log.time).strftime('%H:%M:%S %Y-%m-%d UTC') +"<br />\n")
        self.response.out.write("Message: "+app_log.message+"<br />\n")
        # App log messages look like "key1: val1; key2: val2; ...";
        # surface any "link" entry as a clickable stats-details anchor.
        for fragment in app_log.message.split("; "):
          parts = fragment.split(": ")
          if len(parts) > 1 and parts[0] == 'link':
            statslink = ("<a href=\"%s\">%s</a>" % (parts[1], parts[1]))
            self.response.out.write("<br />STATS DETAILS: %s<br />" % (statslink))

      if i >= count:
        show_next = True
        break
    self.response.out.write("<br />\n")

    # Prepare the offset URL parameters, if any.
    if show_next:
      query = self.request.GET
      query['offset'] = base64.urlsafe_b64encode(last_offset)
      next_link = urllib.urlencode(query)
      self.response.out.write("<a href=\"/logs?"+next_link+"\">Next</a>")
      self.response.out.write("<br />\n")
Exemplo n.º 26
0
def fetch_logs(time_period, recovery_log, username, password, app_name, version_ids, offset=None, dest="/tmp/gae_log.log", append=False):
    """Download GAE request logs for *time_period* into the file *dest*.

    One JSON line (via ``_prepare_json``) is written per request log.
    Resume state is kept in the module globals ``last_time_period`` and
    ``last_offset``.

    Args:
        time_period: dict with 'start'/'end' timestamps and matching
            'start_human'/'end_human' display strings.
        recovery_log: unused here; kept for interface compatibility.
        username, password: remote-API credentials.
        app_name: GAE application id.
        version_ids: app version(s) whose logs are fetched.
        offset: opaque logservice offset to resume from, or None.
        dest: output file path.
        append: append to *dest* instead of truncating it.

    Returns:
        "" (results go to the file and the recovery globals).
    """
    # Named distinctly so the file handle below does not shadow the auth
    # callback (the original reused ``f`` for both).
    auth_func = lambda: (username, password)

    try:
        remote_api_stub.ConfigureRemoteApi(None, REMOTE_API_PATH, auth_func, app_name)
    except ConfigurationError:
        # Token expired? Retry the authentication once.
        logger.exception("Token validation failed. Probably expired. Will retry")
        remote_api_stub.ConfigureRemoteApi(None, REMOTE_API_PATH, auth_func, app_name)

    logger.info("Successfully authenticated")

    logger.info("Fetching logs from %s to %s (GAE TZ)"
            % (time_period['start_human'], time_period['end_human']))

    end = time_period['end']
    start = time_period['start']

    intervals = _split_time_period(start, end)

    # TODO - move to classes instead of globals
    global last_time_period
    last_time_period = time_period

    i = 0
    mode = 'a' if append else 'w'

    # ``with`` closes the file even when an exception the handler below does
    # not catch (e.g. KeyboardInterrupt) escapes.
    with open(dest, mode) as log_file:
        try:
            for interval in intervals:
                start, end = interval
                logger.info("Interval : %s - %s" % (start, end))

                for req_log in logservice.fetch(end_time=end,
                        start_time=start,
                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                        version_ids=version_ids,
                        include_app_logs=True, include_incomplete=True,
                        offset=offset):

                    logger.debug("Retrieved - %s" % req_log.combined)

                    i += 1
                    if i % 100 == 0:
                        logger.info("Fetched %d req logs so far" % i)

                    log_file.write(_prepare_json(req_log))
                    log_file.write('\n')

                    # keeping track in case - if need to resume
                    global last_offset
                    last_offset = req_log.offset
                # end fetch
            # end interval
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit propagate; recovery info is still saved.
            logger.exception("Something went wrong")
            save_recovery_info()

    logger.info("Retrieved %d logs. Done." % i)

    return ""