from logger import set_up_logging sys.path.append( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)) sys.path.insert(0, os.path.dirname(__file__)) # @modified 20191115 - Branch #3262: py3 # This prevents flake8 E402 - module level import not at top of file if True: import settings import flux from skyline_functions import (get_redis_conn, get_redis_conn_decoded) # @added 20210718 - skyline-syslog from matched_or_regexed_in_list import matched_or_regexed_in_list logger = set_up_logging(None) try: SERVER_METRIC_PATH = '.%s' % settings.SERVER_METRICS_NAME if SERVER_METRIC_PATH == '.': SERVER_METRIC_PATH = '' except: SERVER_METRIC_PATH = '' # Wrap per metric logging in if FLUX_VERBOSE_LOGGING try: FLUX_VERBOSE_LOGGING = settings.FLUX_VERBOSE_LOGGING except: FLUX_VERBOSE_LOGGING = True try:
import falcon
import json
from app.utils.db_connect import DBConnect
from logger import set_up_logging
from app.models.schema import OrderCart, OrderDetails, PhleboDetails, PickupAddress, PaymentDetails
from config.base import Session, engine, Base

logger = set_up_logging()

# generate database schema
Base.metadata.create_all(engine)

# create a new session
# NOTE(review): a single module-level session is shared by every request this
# module serves — confirm this is intended; SQLAlchemy sessions are not
# thread-safe across concurrent requests.
session = Session()


class GetUserHistory(object):
    """Serves all apis related to Orders.

    Falcon resource; currently exposes GET only. NOTE(review): the class
    body continues beyond the visible chunk.
    """

    def on_get(self, req, resp):
        # Fetch the order history for a patient and collect the phlebotomist
        # contact details for each order.
        # NOTE(review): patient_id is hard-coded to 426 — presumably a
        # placeholder that should come from the request; TODO confirm.
        try:
            cart_order = session.query(OrderCart).filter_by(patient_id=426).all()
            data = []
            for item in cart_order:
                # One entry per order: the assigned phlebo's name and phone.
                order = {
                    'phlebo_name': item.phlebo_detail.phlebo_name,
                    'phlebo_number': item.phlebo_detail.phlebo_phone
                }
                data.append(order)
            logger.info("Data fetched successfully : {}".format("Success"))
import json
import traceback
from ast import literal_eval
from redis import StrictRedis
import falcon
import graphyte

# Make the parent (skyline) directory and this directory importable so the
# project-local modules below resolve.
# NOTE(review): `sys` and `os` are assumed to be imported above this chunk —
# TODO confirm.
sys.path.append(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
import settings
from logger import set_up_logging
import flux

logger = set_up_logging('populate_metric')

# URI arguments are solely used for identifying requests in the log, all the
# required metric data is submitted via a POST with a json payload.
validArguments = ['remote_target', 'metric', 'namespace_prefix', 'key', 'user']

LOCAL_DEBUG = False
# LOCAL_DEBUG = True

# Carbon relay target read from settings; graphyte is only initialised when
# forwarding to carbon is enabled.
GRAPHITE_METRICS_PREFIX = None
CARBON_HOST = settings.FLUX_CARBON_HOST
CARBON_PORT = settings.FLUX_CARBON_PORT
if settings.FLUX_SEND_TO_CARBON:
    try:
        graphyte.init(CARBON_HOST, port=CARBON_PORT, prefix=None, timeout=5)
        # NOTE(review): this logger.info call (and the enclosing try block)
        # continues beyond the visible chunk.
        logger.info(
except ImportError: from queue import Empty # Python 3 from time import sleep, time from ast import literal_eval from redis import StrictRedis import graphyte import statsd from logger import set_up_logging sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)) sys.path.insert(0, os.path.dirname(__file__)) import settings from skyline_functions import send_graphite_metric logger = set_up_logging('worker') try: SERVER_METRIC_PATH = '.%s' % settings.SERVER_METRICS_NAME if SERVER_METRIC_PATH == '.': SERVER_METRIC_PATH = '' except: SERVER_METRIC_PATH = '' parent_skyline_app = 'flux' skyline_app_graphite_namespace = 'skyline.%s%s.worker' % (parent_skyline_app, SERVER_METRIC_PATH) if settings.FLUX_SEND_TO_CARBON: GRAPHITE_METRICS_PREFIX = None CARBON_HOST = settings.FLUX_CARBON_HOST CARBON_PORT = settings.FLUX_CARBON_PORT
# Module prelude for the flux populate_metric_worker: path setup, project
# imports and import-time configuration read from settings.
import pickle
import struct
import requests
from redis import StrictRedis
import graphyte
import pandas as pd
from logger import set_up_logging

# Make the parent (skyline) directory and this directory importable so the
# project-local modules below resolve.
# NOTE(review): `sys` and `os` are assumed to be imported above this chunk.
sys.path.append(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
import settings
from skyline_functions import send_graphite_metric

logger = set_up_logging('populate_metric_worker')

this_host = str(os.uname()[1])

# Optional per-server metric path suffix; falls back to '' when
# settings.SERVER_METRICS_NAME is unset or empty.
try:
    SERVER_METRIC_PATH = '.%s' % settings.SERVER_METRICS_NAME
    if SERVER_METRIC_PATH == '.':
        SERVER_METRIC_PATH = ''
# @modified review: narrowed from a bare except so SystemExit and
# KeyboardInterrupt are not swallowed at import time.
except Exception:
    SERVER_METRIC_PATH = ''

parent_skyline_app = 'flux'
skyline_app_graphite_namespace = 'skyline.%s%s.populate_metric_worker' % (
    parent_skyline_app, SERVER_METRIC_PATH)

# LOCAL_DEBUG = False
import os
from time import time
import json
import traceback
from multiprocessing import Queue
from logger import set_up_logging
import falcon

# Make the parent (skyline) directory and this directory importable so the
# project-local modules below resolve.
# NOTE(review): `sys` is assumed to be imported above this chunk — TODO confirm.
sys.path.append(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
import settings
import flux

logger = set_up_logging('listen')

# LOCAL_DEBUG = False
# NOTE(review): debug logging is left enabled here — confirm this is intended
# rather than a leftover from development.
LOCAL_DEBUG = True


def validate_key(caller, apikey):
    # Validate an API key: the visible checks require the key to be
    # alphanumeric and exactly 32 characters long.
    # NOTE(review): the function body continues beyond the visible chunk;
    # the full validity decision cannot be determined from here.
    try:
        isAlNum = False
        isAlNum = apikey.isalnum()
        if isAlNum:
            keyLength = len(apikey)
            if keyLength == 32:
                # Check to determine if it is a valid API key
                keyValid = True
            else: