Пример #1
0
    def __init__(self, process, **kwargs):
        """Merge configured model parameters with keyword overrides and keep the processing callback.

        Defaults are read from the "model_parameters" config section named
        after the concrete class; explicit keyword arguments win.
        """
        merged = config.get("model_parameters", self.__class__.__name__)
        merged.update(kwargs)
        self._params = merged

        # Mirror every parameter onto the instance for convenient attribute access.
        for name in self._params:
            setattr(self, name, self._params[name])

        self.__process = process
Пример #2
0
    def __init__(self, process, **kwargs):
        """Bind configured model parameters (overridable via **kwargs) to this instance."""
        # Config section is keyed by the concrete subclass name.
        section = self.__class__.__name__
        self._params = config.get('model_parameters', section)
        self._params.update(kwargs)

        # Promote every parameter to an instance attribute.
        for attr, val in self._params.items():
            setattr(self, attr, val)

        self.__process = process
Пример #3
0
#!/usr/bin/python
import cv2
import numpy as np

from gestures.demo.hrsm import HandGestureRecognizer
from gestures.demo.gui import DemoGUI
from gestures.gesture_classification import dollar
from gestures.utils.framebuffer import FrameBuffer
from itertools import imap

from gestures import config
# Parameters for the 'dollar' gesture classifier.
params = config.get('model_parameters','dollar')
scale = params['scale']
samplesize = params['samplesize']

# Show preprocessed gesture and closest matching template
def gesture_match(query, template, score, theta, clsid):
    """Render the preprocessed query gesture and, on a confident match, the winning template.

    Appends drawable (line, data) pairs to the module-level ``artists`` list,
    updates the match axes title, and requests a GUI redraw.
    """
    xs, ys = query
    npoints = len(xs)  # raw point count, reported in the title
    xs, ys = dollar.preprocess(xs, ys, scale, samplesize)

    if score > 0.8:
        # Confident match: rotate the query into the template's frame
        # and queue the template for drawing as well.
        query = dollar.rotate(xs, ys, theta)
        artists.append((gui.lines['template'], template))
        title = "%s (N=%d, score: %.2f)" % (clsid, npoints, score)
    else:
        query = (xs, ys)
        title = "No match (scored too low)"

    artists.append((gui.lines['query'], query))
    gui.axes['match'].set_title(title)

    global redraw
    redraw = True
Пример #4
0
#!/usr/bin/python
import h5py
import numpy as np
import matplotlib.pyplot as plt
from gestures.gesture_classification import dollar
from gestures.config import model_parameters as params
from gestures import config
import sys

# Open the stored gesture templates (read-only) and the libras dataset
# passed on the command line.
templates_fh = h5py.File(config.get('gesture_templates'),'r')
libras_fh = h5py.File(sys.argv[1],'r')

print """
dollar classifier demo
======================
Use directional keys to navigate matches
"""

# Total number of gesture samples across all datasets in the libras file.
# NOTE: itervalues() is Python 2 only.
NSAMPLES = sum(len(ds) for ds in libras_fh.itervalues())
try:
    CNT   = 0
    # Preprocessing parameters for the 'dollar' classifier: bounding-box
    # scale and resample count.
    scale = params['dollar']['scale']
    N     = params['dollar']['samplesize']

    # 3x3 grid: query and its transform in the middle row, the top three
    # template matches stacked in the right column.
    fig = plt.figure()
    axes = {}
    axes['query'] = plt.subplot2grid((3,3), (1, 0))
    axes['transform'] = plt.subplot2grid((3,3), (1, 1))
    axes['match_0'] = plt.subplot2grid((3,3), (0, 2))
    axes['match_1'] = plt.subplot2grid((3,3), (1, 2))
    axes['match_2'] = plt.subplot2grid((3,3), (2, 2))
Пример #5
0
#!/usr/bin/python
import h5py
import numpy as np
import matplotlib.pyplot as plt
from gestures.gesture_classification import dollar
from gestures.config import model_parameters as params
from gestures import config
import sys

# Read-only handles: stored gesture templates plus the libras dataset
# given as the first command-line argument.
templates_fh = h5py.File(config.get('gesture_templates'), 'r')
libras_fh = h5py.File(sys.argv[1], 'r')

print """
dollar classifier demo
======================
Use directional keys to navigate matches
"""

# Count every sample across all datasets in the file (Python 2 itervalues).
NSAMPLES = sum(len(ds) for ds in libras_fh.itervalues())
try:
    CNT = 0
    # 'dollar' classifier preprocessing parameters.
    scale = params['dollar']['scale']
    N = params['dollar']['samplesize']

    # Layout: query / transform panels in the middle row, three match
    # panels down the right-hand column of a 3x3 grid.
    fig = plt.figure()
    axes = {}
    axes['query'] = plt.subplot2grid((3, 3), (1, 0))
    axes['transform'] = plt.subplot2grid((3, 3), (1, 1))
    axes['match_0'] = plt.subplot2grid((3, 3), (0, 2))
    axes['match_1'] = plt.subplot2grid((3, 3), (1, 2))
    axes['match_2'] = plt.subplot2grid((3, 3), (2, 2))
Пример #6
0
import cv2
import numpy as np
import h5py

from gestures.gesture_classification import dollar
from gestures.segmentation import SkinMotionSegmenter
from gestures.hand_detection import ConvexityHandDetector
from gestures.tracking import CrCbMeanShiftTracker
from gestures.core.common import findBBoxCoM_contour, findBBoxCoM
from abc import ABCMeta, abstractmethod

from gestures import config
# Preprocessing parameters for the 'dollar' gesture classifier.
params = config.get('model_parameters', 'dollar')
scale, samplesize = params['scale'], params['samplesize']

# global constants
WAIT_PERIOD = 5  # NOTE(review): units not shown here -- presumably ticks/frames; confirm
VAL_PERIOD = 1
MINWAYPTS = 10   # minimum waypoint count (presumably for a valid gesture path)


class StateMachineBase(object):
    """Abstract base for state machines whose states are stored as callables."""
    # Python 2 style abstract-base-class declaration.
    __metaclass__ = ABCMeta

    def __init__(self, init_state):
        # init_state: callable representing the machine's starting state.
        self._state = init_state

    @property
    def state(self):
        """Name of the current state (the state callable's __name__)."""
        return self._state.__name__
Пример #7
0
#!/usr/bin/python
import cv2
import numpy as np

from gestures.demo.hrsm import HandGestureRecognizer
from gestures.demo.gui import DemoGUI
from gestures.gesture_classification import dollar
from gestures.utils.framebuffer import FrameBuffer
from itertools import imap

from gestures import config
# Parameters for the 'dollar' gesture classifier.
params = config.get('model_parameters', 'dollar')
scale, samplesize = params['scale'], params['samplesize']


# Show preprocessed gesture and closest matching template
def gesture_match(query, template, score, theta, clsid):
    """Show the preprocessed query and, if the score clears 0.8, the matched template.

    Appends (line, data) pairs to the module-level ``artists`` list and
    updates the match axes title on the module-level ``gui``.
    """
    x, y = query
    n = len(x)  # raw point count, reported in the title
    x, y = dollar.preprocess(x, y, scale, samplesize)

    if score > 0.8:
        # Confident match: rotate the query into the template's frame.
        query = dollar.rotate(x, y, theta)
        artists.append((gui.lines['template'], template))
        title = "%s (N=%d, score: %.2f)" % (clsid, n, score)
    else:
        query = x, y
        title = "No match (scored too low)"
    artists.append((gui.lines['query'], query))
    gui.axes['match'].set_title(title)
    global redraw
Пример #8
0
import cv2
import numpy as np
import h5py

from gestures.gesture_classification import dollar
from gestures.segmentation import SkinMotionSegmenter
from gestures.hand_detection import ConvexityHandDetector
from gestures.tracking import CrCbMeanShiftTracker
from gestures.core.common import findBBoxCoM_contour,findBBoxCoM
from abc import ABCMeta, abstractmethod

from gestures import config
# Preprocessing parameters for the 'dollar' gesture classifier.
params = config.get('model_parameters','dollar')
scale, samplesize = params['scale'], params['samplesize']

# global constants
WAIT_PERIOD = 5  # NOTE(review): units not shown here -- presumably ticks/frames; confirm
VAL_PERIOD = 1
MINWAYPTS = 10   # minimum waypoint count (presumably for a valid gesture path)

class StateMachineBase(object):
    """Abstract base for state machines whose states are stored as callables."""
    # Python 2 style abstract-base-class declaration.
    __metaclass__ = ABCMeta

    def __init__(self,init_state):
        # init_state: callable representing the machine's starting state.
        self._state = init_state

    @property
    def state(self):
        """Name of the current state (the state callable's __name__)."""
        return self._state.__name__

    def tick(self,*args):