示例#1
0
 def log(self, *args):
     """Log each argument at NOTICE level.

     String arguments are logged verbatim; any other value is logged
     via repr().
     """
     from lib import logger, LOG_NOTICE
     for arg in args:
         # isinstance also accepts str/unicode subclasses, unlike the
         # exact type(...) membership test it replaces.
         if isinstance(arg, (str, unicode)):
             text = arg
         else:
             text = repr(arg)
         logger(LOG_NOTICE, text)
示例#2
0
 def log(self, *args):
     """Send every argument to the shared logger at NOTICE priority."""
     from lib import logger, LOG_NOTICE
     for item in args:
         # Strings pass through untouched; everything else is repr()'d.
         text = item if type(item) in (str, unicode) else repr(item)
         logger(LOG_NOTICE, text)
示例#3
0
def test_loglevel_error():
    """init_logger("ERROR") must set the effective level to logging.ERROR."""
    # Fixed copy/paste bug: the trace label previously said
    # "test_loglevel_info", which mislabels this test in the logs.
    trace = "tests#test_logger#test_loglevel_error"
    info("Enter", trace)

    init_logger("ERROR")
    assert logger().getEffectiveLevel() == logging.ERROR

    info("Exit", trace)
示例#4
0
def test_loglevel_info():
    """init_logger("INFO") must set the effective level to logging.INFO."""
    trace = "tests#test_logger#test_loglevel_info"
    info("Enter", trace)

    init_logger("INFO")
    actual = logger().getEffectiveLevel()
    assert actual == logging.INFO

    info("Exit", trace)
    def POST(self, pluginName=None, *args, **kwargs):
        """Classify an image using given plugin name and return predictions.

        @param pluginName: Optional name of plugin to lookup. Raise an error
            if not supplied or not a valid name.

        @return predictions: list of strings for predicted labels, ordered as
            most likely first.
        """
        # A plugin name is mandatory for this endpoint.
        if pluginName is None:
            raise cherrypy.HTTPError(405,
                                     "POST method requires a plugin name.")

        if pluginName not in PLUGINS:
            raise cherrypy.HTTPError(
                400,
                "Expected plugin name as one of {names}, but got: '{actual}'.".
                format(names=CONFIGURED_PLUGINS, actual=pluginName))
        plugin = PLUGINS[pluginName]

        # A known name can still map to None when its model failed to load.
        if plugin is None:
            raise cherrypy.HTTPError(
                500,
                "That plugin has not been setup on the server. Ensure it has"
                " a valid graph file and that this is indicated in the"
                " model conf file.")

        began = time.time()
        predictions = plugin.process(**kwargs)

        # Trim to the configured maximum number of results.
        limit = conf.getint('predictions', 'maxResults')
        predictions = predictions[:limit]

        elapsed = time.time() - began
        msg = "Completed request. Name: {name}. Duration: {duration:4.3f}s."\
            .format(name=pluginName, duration=elapsed)
        logger(msg, context="SERVICES.CLASSIFY.PREDICTION")

        cherrypy.response.status = 201

        return predictions
示例#6
0
    def process(self, imagePath=None, imageFile=None, x=None, y=None):
        """Do category prediction on the configured plugin instance using
        input image and co-ordinate point.

        Expects an image and mark co-ordinates, then does a prediction
        for categories and returns as a list, ordered from highest to lowest
        probability.

        @param imagePath: Default None. path to local image on server, as
            a string.
        @param imageFile: Default None. Multi-part cherrypy request body.
            This is expected to have a file attribute with image data.
        @param x: Default None. The X co-ordinate of the mark on the image, as
            a percentage value from 0 to 100. As an integer.
        @param y: Default None. The Y co-ordinate of the mark on the image, as
            a percentage value from 0 to 100. As an integer.

        @return predictions: list of categories prediction values, as strings.

        @raise ValueError: if neither `imagePath` nor `imageFile` is given.
        """
        start = time.time()

        if imagePath:
            image = imagePath
            filename = os.path.basename(image)
        elif imageFile:
            image = imageFile.file
            filename = imageFile.filename
        else:
            # Fixed: missing space between "either" and the backtick.
            raise ValueError("Expected value for either `imagePath` or"
                             " `imageFile` parameters.")

        preProcessedImg = self._preProcessImg(image, x, y)
        predictions = self._doPrediction(preProcessedImg)

        # Fixed: the format string contained a literal "(unknown)" where the
        # `{filename}` placeholder belonged, so the computed filename was
        # never rendered even though it was passed to format().
        msg = "Completed prediction. Duration: {duration:4.3f}s."\
            " Filename: {filename}. Results: {results}.".format(
                duration=time.time() - start,
                filename=filename,
                results=json.dumps(predictions)
            )
        logger(msg=msg, context=self.getContext())

        return predictions
示例#7
0
def test_canAppendLoggers():
    """Chained append= calls join as '[time][a:b:c] message'."""
    # Renamed local from `time` to avoid shadowing the stdlib module name.
    clock = MagicMock()
    clock.dateTimeIso.return_value = '1'
    logger = lib.logger.config(time=clock,
                               include=['.*'],
                               exclude=[],
                               enabled=True)

    chained = logger(append='1')(append='2')(append='3')
    assert chained('abc') == '[1][1:2:3] abc'
示例#8
0
def test_canSpecifyCustomName():
    """An explicit name= is appended after the chained append values."""
    # Renamed local from `time` to avoid shadowing the stdlib module name.
    clock = MagicMock()
    clock.dateTimeIso.return_value = '1'
    logger = lib.logger.config(time=clock,
                               include=['.*'],
                               exclude=[],
                               enabled=True)

    result = logger(append='1')(append='2')(append='3')('abc', name='custom')
    assert result == '[1][1:2:3:custom] abc'
示例#9
0
    def _loadGraph(self, modelPath):
        """Unpersist the plugin's TensorFlow graph so its tensors can be used.

        The time taken for this method to complete is sent to the log.

        Private; called internally on model initialisation. Reads the
        proto-buf file at `modelPath` into a `GraphDef` and imports it into
        a fresh `tf.Graph`, so that tensors and operations can later be
        fetched from it within a TensorFlow session to do predictions.

        @param modelPath: path to the model graph file, as a string.

        @return graph: a tf.Graph instance which contains the graph
            definition for the required model.
        """
        began = time.time()

        graph = tf.Graph()
        # Make the new graph the default so import_graph_def targets it.
        with graph.as_default():
            with tf.gfile.FastGFile(modelPath, 'rb') as fIn:
                # Parse the proto-buf bytes into an empty graph-def.
                graphDef = tf.GraphDef()
                graphDef.ParseFromString(fIn.read())
                # Empty name avoids prefixing the imported tensor names.
                tf.import_graph_def(graphDef, name='')

        elapsed = time.time() - began
        logger("Loaded model. Duration: {0:4.3f}s.".format(elapsed),
               context=self.getContext())

        return graph
示例#10
0
 def log(self, msg):
     """Log `msg` at NOTICE level.

     Fixed: LOG_NOTICE was referenced without being imported, causing a
     NameError at call time; it is now imported alongside logger.
     """
     from lib import logger, LOG_NOTICE
     logger(LOG_NOTICE, msg)
示例#11
0
 def _add_logger(self):
     """Attach a logger instance (empty prefix) to this object."""
     self.logger = logger('')
示例#12
0
 def __init__(self):
     """Set up collaborators: media library, media handler and logger."""
     # NOTE(review): construction order preserved — the collaborators'
     # constructors may have side effects; confirm before reordering.
     self.mml = MultiMediaLib()
     self.mh = MediaHandler()
     self.logger = logger()
 def __init__(self):
     """Initialise the logger and per-media-type file extension lists."""
     self.logger = logger()
     self.video_codec_list = ['avi', 'cam', 'mkv', 'mov', 'mpeg', 'mpg', 'mpe', 'svi', 'wmv',]
     # Fixed: these image extensions were previously assigned to
     # music_codec_list, leaving image_codec_list empty.
     self.image_codec_list = ['jpeg', 'png', 'ppm', 'jpg', 'tga']
     self.music_codec_list = []
     self.doc_codec_list = []
示例#14
0
import time, machine, update, wifisettings, nodesettings, lib.requests, lib.logger, lib.requests, lib.timew, time, os, machine
from umqtt.robust import MQTTClient
from machine import Pin, WDT
led = Pin(2, Pin.OUT)
led.off()

t = lib.timew.Time(time=time)

# Configure Logger
logger = lib.logger.config(enabled=nodesettings.settings['debug'],
                           include=nodesettings.settings['logInclude'],
                           exclude=nodesettings.settings['logExclude'],
                           time=t)
log = logger(append='boot')
log("The current time is %s" % t.human())

mqtt_pth = nodesettings.settings['controllerName']
client = MQTTClient(mqtt_pth,
                    wifisettings.settings['mqtt_ip'],
                    port=wifisettings.settings['mqtt_port'])

mq_c = False
for i in range(1):
    try:
        client.connect()
        mq_c = True
    except:
        log('MQTT: cannot connect')
        led.on()
        time.sleep(1)
if not mq_c: