def _debug(method, url, params, payload, request_headers, response_headers,
           response, status_code, elapsed, cached=None):
    try:
        if g.app.debug:
            debug = g.app.debug
        else:
            debug = False
    except NoContextError:
        debug = True

    if debug:
        log_id = string_id(length=6)

        try:
            payload = js.loads(payload)
            payload = js.dumps(payload)
        except Exception:
            pass

        try:
            response = js.loads(response)
            response = js.dumps(response)
        except Exception:
            pass

        log.debug('Method: %s' % method +
                  ', URL: %s' % url +
                  ', Params: %s' % params +
                  ' (%s %s)' % (status_code,
                                HTTP_STATUS_CODES[status_code]) +
                  ' Cache: %s' % cached,
                  timer=elapsed, log_id=log_id)

        for header in request_headers:
            log.debug('Request Header: %s="%s"'
                      % (header, request_headers[header]),
                      log_id=log_id)

        for header in response_headers:
            log.debug('Response Header: %s="%s"'
                      % (header, response_headers[header]),
                      log_id=log_id)

        log.debug(payload, prepend='Request Payload', log_id=log_id)
        log.debug(response, prepend='Response Payload', log_id=log_id)
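# Hypothetical usage sketch (illustrative values only, not part of the
# module): _debug() is an internal helper, normally called after an HTTP
# round trip to log the request and response when debug logging is enabled.
#
#     _debug('GET', 'https://api.example.com/v1/users', {'page': 1},
#            None, {'Accept': 'application/json'},
#            {'Content-Type': 'application/json'},
#            '{"users": []}', 200, 0.134, cached=False)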
def execute(*args, check=True, virtualenv=False):
    from luxon import GetLogger
    log = GetLogger(__name__)

    loginfo = TemporaryFile()
    logerr = TemporaryFile()
    log_id = string_id(length=6)

    try:
        env = os.environ.copy()
        if virtualenv is False:
            if '__PYVENV_LAUNCHER__' in env:
                del env['__PYVENV_LAUNCHER__']

        log.info("Execute '%s'" % " ".join(args[0]), log_id=log_id)

        subprocess.run(*args, stdout=loginfo, stderr=logerr,
                       check=True, env=env)

        loginfo.seek(0)
        logerr.seek(0)
        log.info(if_bytes_to_unicode(loginfo.read()), log_id=log_id)
        log.error(if_bytes_to_unicode(logerr.read()), log_id=log_id)
        loginfo.seek(0)
        return if_bytes_to_unicode(loginfo.read())
    except subprocess.CalledProcessError:
        logerr.seek(0)
        if check is True:
            cmd = " ".join(*args)
            raise ExecuteError(cmd,
                               if_bytes_to_unicode(logerr.read())) from None
        loginfo.seek(0)
        logerr.seek(0)
        log.info(if_bytes_to_unicode(loginfo.read()), log_id=log_id)
        log.error(if_bytes_to_unicode(logerr.read()), log_id=log_id)
        logerr.seek(0)
        return if_bytes_to_unicode(logerr.read())
    finally:
        loginfo.close()
        logerr.close()
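# Hedged usage sketch: the command is passed as a list of arguments, as with
# subprocess.run(). On success the captured stdout is returned as text; with
# check=False a failing command returns its stderr instead of raising
# ExecuteError. The commands below are illustrative assumptions.
#
#     listing = execute(['ls', '-l'])
#     errors = execute(['ls', '/does/not/exist'], check=False)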
def chart(chart, c_type="pie", title=None, legend=True, ysuffix=None):
    chart_id = string_id()

    if title is None:
        title = chart.get('title')

    c_type = chart.get('type', c_type)
    chart_labels = chart.get('labels')

    legend_data = []

    for no, chart_label in enumerate(chart_labels):
        if c_type in ["pie", "doughnut"]:
            # Not a time series here, so labels are plain quoted strings.
            chart_labels[no] = "\"" + str(chart_label) + "\""
            legend_data.append({'label': chart_label,
                                'bgcolor': graph_colors[no]})

    chart_labels = ','.join([str(i) for i in chart_labels])

    datasets = chart.get('datasets', [])

    for no, dataset in enumerate(datasets):
        if c_type in ["bar", "line"]:
            dataset['background_color'] = "\"" + graph_colors[no] + "\""
        if c_type in ["pie", "doughnut"]:
            dataset['background_color'] = "[ " + ','.join(
                ["\"" + str(i) + "\"" for i in graph_colors]) + " ]"
        if c_type in ["line"]:
            dataset['border_color'] = "\"" + graph_colors[no] + "\""
            dataset['fill'] = "false"
        dataset['data'] = ','.join([str(i) for i in dataset['data']])

    return render_template('photonic/chart.html',
                           chart_title=chart.get('title', title),
                           chart_id=chart_id,
                           chart_type=c_type,
                           chart_labels=chart_labels,
                           chart_legend=legend,
                           chart_ysuffix=ysuffix,
                           chart_datasets=datasets,
                           legend_data=legend_data)
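# Hedged usage sketch: the shape of the 'chart' dict below is inferred from
# the lookups in chart() above (title, type, labels, datasets) and is an
# assumption, not a documented schema.
#
#     html = chart({'title': 'Requests per region',
#                   'type': 'pie',
#                   'labels': ['eu', 'us', 'apac'],
#                   'datasets': [{'label': 'Requests',
#                                 'data': [120, 340, 90]}]})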
def request_id():
    # Using random is pretty slow, so this is much quicker.
    # It uses a cached proc_id, which uses random once per PID;
    # after that only a counter is appended.
    # The result may not be as unique, but is highly unlikely to
    # collide with recent request ids.
    global req_c, pid

    if req_c is None:
        req_c = random.randint(1000*1000, 1000*1000*1000)

    if pid is None:
        pid = str(os.getpid())

    try:
        proc_id = proc_ids[pid]
    except KeyError:
        proc_id = proc_ids[pid] = string_id(6)

    req_id = req_c = req_c + 1
    req_id = hex(req_id)[2:].zfill(8)[-8:]

    return proc_id + '-' + req_id
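# Sketch of the resulting id format (illustrative values only): a 6-character
# per-process id joined to the counter rendered as 8 hex digits.
#
#     request_id()  # e.g. 'a3Xk9Q-05f5e101'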
def log_formatted(logger_facility, message, prepend=None, append=None,
                  timer=None, log_id=None):
    """Log formatted content using the supplied logger.

    Args:
        logger_facility (object): Python logger. (log.debug for example)
        message (str): Message to log.
    """
    try:
        log_items = list(g.current_request.log.items())
        log_items.append(('REQUEST-ID', g.current_request.id))
        request = " ".join(['(%s: %s)' % (key, value)
                            for (key, value) in log_items])
    except NoContextError:
        request = ''

    message = str(if_bytes_to_unicode(message)).strip()

    if message != '':
        if timer is not None:
            message += ' (DURATION: %s)' % format_seconds(timer)

        _message = list_of_lines(message)
        message = []
        for line in _message:
            # Safe limit per message...
            # There are restrictions on message sizes.
            # https://tools.ietf.org/html/rfc3164
            # https://tools.ietf.org/html/rfc5426
            message += split_by_n(line, 500)

        if len(message) > 1:
            if log_id is None:
                log_id = string_id(6)

            if prepend is not None:
                logger_facility("(%s) #0 %s" % (log_id, prepend,))

            for line, p in enumerate(message):
                msg = '(%s) %s# %s' % (log_id, line + 1, p)
                logger_facility(msg)

            if append is not None:
                logger_facility("(%s) #%s %s" % (log_id, line + 2, append))
        else:
            if log_id is not None:
                msg = '(%s) ' % log_id
            else:
                msg = ''

            if prepend is not None:
                msg += '%s %s' % (prepend, message[0])
            else:
                msg += '%s' % message[0]

            if append is not None:
                msg = '%s %s' % (msg, append)

            msg = '%s %s' % (msg, request)
            logger_facility(msg)
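# Hedged usage sketch: log_formatted() wraps an existing logger method and
# splits long messages into 500-character chunks tied together by a shared
# log id. The logger method and variables below are illustrative.
#
#     log_formatted(log.debug, response_body,
#                   prepend='Response Payload', timer=elapsed)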
import os
import threading
import random

from luxon import GetLogger
from luxon.utils.unique import string_id
from luxon.structs.container import Container

log = GetLogger(__name__)

# Request counter... read further down.
req_c = random.randint(1000*1000, 1000*1000*1000)

# Process ID.
proc_id = string_id(6)


def request_id():
    # Using random is pretty slow, so this is much quicker.
    # It uses the cached proc_id, which uses random once per process;
    # after that only a counter is appended.
    # The result may not be as unique, but is highly unlikely to
    # collide with recent request ids.
    global req_c

    req_id = req_c = req_c + 1
    req_id = hex(req_id)[2:].zfill(8)[-8:]

    return proc_id + '-' + req_id


class RequestBase(object):