def __init__(self):
    # |X| x |Y|: each feature in X corresponds to a row and each feature in Y to a column.
    self.rows_per_chunk = int(os.environ.get('ROWS_PER_CHUNK', 500))
    self.cols_per_chunk = int(os.environ.get('COLS_PER_CHUNK', 500))
    self.input_to_id_x = {label: idx for idx, label in
                          enumerate(sorted(file_parse.load(os.environ['LOCAL_LABELS_FN_X'])))}
    self.input_to_id_y = {label: idx for idx, label in
                          enumerate(sorted(file_parse.load(os.environ['LOCAL_LABELS_FN_Y'])))}
    self.num_row_chunks = int(np.ceil(len(self.input_to_id_x) / float(self.rows_per_chunk)))
    self.num_col_chunks = int(np.ceil(len(self.input_to_id_y) / float(self.cols_per_chunk)))
    print('Row Chunks[%d] Col Chunks[%d]' % (self.num_row_chunks, self.num_col_chunks))
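The comment above describes a |X| x |Y| matrix of feature pairs split into fixed-size chunks. As a minimal sketch (the standalone helper chunk_coords and its arguments are illustrative, not part of the original class), a label pair maps to chunk coordinates like this:

def chunk_coords(label_x, label_y, input_to_id_x, input_to_id_y,
                 rows_per_chunk=500, cols_per_chunk=500):
    # Mirrors the chunking arithmetic of the __init__ above (illustrative only).
    row = input_to_id_x[label_x]  # row index in the |X| x |Y| matrix
    col = input_to_id_y[label_y]  # column index
    return row // rows_per_chunk, col // cols_per_chunk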
Example 3
def convert_leaves_all_probs_pred_old(image, leaves, all_probs, num_leaves, classifiers_fn=None):
    global CLASSIFIERS, CLASSIFIER_FEATURE
    if classifiers_fn is None:
        classifiers_fn = os.environ['CLASSIFIERS_FN']
    def get_classifier_confidence(x):
        return x[0][0] * x[0][1]
    if CLASSIFIERS is None:
        all_classifiers = sorted(file_parse.load(classifiers_fn))
        name_classifiers = []
        for x in range(len(all_classifiers)):
            if x < len(all_classifiers) - 1:  # TODO(brandyn): Fix memory issue so that we can use the last classifier too
                name_classifiers.append((all_classifiers[x][0],
                                         classifiers.loads(all_classifiers[x][1])))
            else:
                # Memory workaround: alias the last slot to the previously loaded classifier.
                name_classifiers.append((all_classifiers[x][0],
                                         name_classifiers[-1][1]))
            all_classifiers[x] = None  # NOTE(Brandyn): This is done to save memory
        print('ILP Classifiers %r' % ([x for x, _ in name_classifiers],))
        CLASSIFIERS = [x for _, x in name_classifiers]
    if CLASSIFIER_FEATURE is None:
        CLASSIFIER_FEATURE = features.select_feature('bovw_hog')
    feature = CLASSIFIER_FEATURE(np.ascontiguousarray(image[:, :, :3]))
    preds = np.ascontiguousarray([get_classifier_confidence(classifier.predict(feature))
                                  for classifier in CLASSIFIERS], dtype=np.float64)
    out0 = imseg.convert_labels_to_integrals(leaves, num_leaves)
    out1 = imseg.convert_all_probs_to_integrals(all_probs)
    return preds, np.ascontiguousarray(np.dstack([out0, out1]))
Example 4
def __init__(self):
    self._classifiers = [
        (x, classifiers.loads(y))
        for x, y in file_parse.load(os.environ['CLASSIFIERS_FN'])
    ]
    self._feat = features.select_feature(os.environ['FEATURE'])
    self._image_height, self._image_width = _parse_height_width()
Example 5
def run_classifier_labels(hdfs_input_pos, hdfs_input_neg, hdfs_output, classifier_name, classifier_extra, local_labels, classifier, **kw):
    """Collect positive/negative example keys and record them in a local labels file.

    Args:
        hdfs_input_pos: HDFS path of the positive examples.
        hdfs_input_neg: HDFS path of the negative examples.
        hdfs_output: Path to HDFS temporary output, or None if execution should
            be performed locally using hadoopy.launch_local.
        classifier_name: Key under which the collected labels are stored.
        classifier_extra: Extra classifier configuration, stored verbatim.
        local_labels: Local labels file updated in place (created if missing).
        classifier: Classifier identifier, stored verbatim.
    """
    labels = {}
    try:
        labels = file_parse.load(local_labels)
    except IOError:
        pass
    if hdfs_output is None:
        j = hadoopy.launch_local(hdfs_input_pos, None, _lf('collect_keys.py'))
        pos_keys = sum((x[1] for x in j['output']), [])
        j = hadoopy.launch_local(hdfs_input_neg, None, _lf('collect_keys.py'))
        neg_keys = sum((x[1] for x in j['output']), [])
    else:
        hdfs_output_pos = hdfs_output + '/pos'
        hdfs_output_neg = hdfs_output + '/neg'
        picarus._launch_frozen(hdfs_input_pos, hdfs_output_pos, _lf('collect_keys.py'))
        picarus._launch_frozen(hdfs_input_neg, hdfs_output_neg, _lf('collect_keys.py'))
        pos_keys = sum((x[1] for x in hadoopy.readtb(hdfs_output_pos)), [])
        neg_keys = sum((x[1] for x in hadoopy.readtb(hdfs_output_neg)), [])

    labels[classifier_name] = {'labels': {'1': pos_keys, '-1': neg_keys},
                               'classifier': classifier,
                               'classifier_extra': classifier_extra}
    file_parse.dump(labels, local_labels)
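A hedged usage sketch of the function above; the HDFS paths, classifier settings, and labels filename are hypothetical, and a configured hadoopy/picarus environment is assumed:

# Hypothetical paths and names; pass hdfs_output=None to run locally
# via hadoopy.launch_local instead of picarus._launch_frozen.
run_classifier_labels('/user/me/faces/pos', '/user/me/faces/neg',
                      '/user/me/tmp/label_out', 'face',
                      classifier_extra='', local_labels='labels.js',
                      classifier='svmlinear')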
Example 6
def inner(self, k, v):
    # Inner wrapper of a decorator: 'func' comes from the enclosing scope.
    # Lazily loads the valid-hash set on first use, then filters inputs by it.
    try:
        good_input = k in self._picarus_valid_image_hashes
    except AttributeError:
        self._picarus_valid_image_hashes = file_parse.load(os.environ['PICARUS_VALID_IMAGE_HASHES'])
        good_input = k in self._picarus_valid_image_hashes
    if good_input:
        return func(self, k, v)
Example 7

def __init__(self):
    self.output_images = int(os.environ.get('OUTPUT_IMAGES', 0))
    path = 'haarcascade_frontalface_default.xml'
    if os.path.exists(path):
        self.cascade = cv.Load(path)
    else:
        raise ValueError("Can't find .xml file!")
    classifier_name, classifier_ser = file_parse.load(os.environ['CLASSIFIERS_FN'])
    self._classifiers = [(classifier_name, classifiers.loads(classifier_ser))]
    self._feat = features.select_feature(os.environ['FEATURE'])
    self._image_height, self._image_width = _parse_height_width()
    # Environment variables arrive as strings; convert them to numbers.
    self._max_frames = float(os.environ.get('MAX_FRAMES', float('inf')))
    self._block_size = int(os.environ.get('BLOCK_SIZE', 900))
    self._match_line_prob = float(os.environ.get('MATCH_LINE_PROB', 0))
    self._frame_output_prob = float(os.environ.get('FRAME_OUTPUT_PROB', 0))
    self.timer = Timer()
Example 9
def predict_classifiers(image, start_ind, stop_ind, classifiers_fn=None):
    global CLASSIFIER_FEATURE, ALL_CLASSIFIERS, CLASSIFIERS
    if classifiers_fn is None:
        classifiers_fn = os.environ['CLASSIFIERS_FN']

    def get_classifier_confidence(x):
        return x[0][0] * x[0][1]
    if ALL_CLASSIFIERS is None:
        ALL_CLASSIFIERS = sorted(file_parse.load(classifiers_fn))
    if CLASSIFIERS is None:
        CLASSIFIERS = {}  # index -> deserialized classifier cache
    if CLASSIFIER_FEATURE is None:
        CLASSIFIER_FEATURE = features.select_feature('bovw_hog')
    feature = CLASSIFIER_FEATURE(np.ascontiguousarray(image[:, :, :3]))
    preds = {}
    for x in range(start_ind, stop_ind):
        try:
            classifier = CLASSIFIERS[x]
        except KeyError:
            # Deserialize on first use and cache for later calls.
            classifier = classifiers.loads(ALL_CLASSIFIERS[x][1])
            CLASSIFIERS[x] = classifier
        preds[x] = get_classifier_confidence(classifier.predict(feature))
    return preds
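Example 9 deserializes only the classifiers with indices in [start_ind, stop_ind), caching each in the module-level CLASSIFIERS dict so repeated calls skip the loads. A minimal usage sketch (the zero image is a placeholder; a real call would pass an actual frame):

import numpy as np
image = np.zeros((64, 64, 3), dtype=np.uint8)   # placeholder frame
confidences = predict_classifiers(image, 0, 5)  # maps index -> confidence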
Example 10
def convert_leaves_all_probs_pred(image, leaves, all_probs, num_leaves, classifiers_fn=None):
    global CLASSIFIER_FEATURE, ALL_CLASSIFIERS, CLASSIFIERS
    preds = []
    if classifiers_fn:
        def get_classifier_confidence(x):
            return x[0][0] * x[0][1]
        if ALL_CLASSIFIERS is None:
            ALL_CLASSIFIERS = sorted(file_parse.load(classifiers_fn))
        if CLASSIFIERS is None:
            CLASSIFIERS = {}  # index -> deserialized classifier cache
        if CLASSIFIER_FEATURE is None:
            CLASSIFIER_FEATURE = features.select_feature('bovw_hog')
        feature = CLASSIFIER_FEATURE(np.ascontiguousarray(image[:, :, :3]))
        for x in range(len(ALL_CLASSIFIERS)):
            try:
                classifier = CLASSIFIERS[x]
            except KeyError:
                classifier = classifiers.loads(ALL_CLASSIFIERS[x][1])
                if x < 14:  # Only the first 14 deserialized classifiers are cached.
                    CLASSIFIERS[x] = classifier
            preds.append(get_classifier_confidence(classifier.predict(feature)))
    preds = np.ascontiguousarray(preds, dtype=np.float64)
    out0 = imseg.convert_labels_to_integrals(leaves, num_leaves)
    out1 = imseg.convert_all_probs_to_integrals(all_probs)
    return preds, np.ascontiguousarray(np.dstack([out0, out1]))
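A sketch of how the function above might be invoked; the array shapes, dtypes, and class count are assumptions inferred from the integral-image conversions, not documented in the source:

import numpy as np
image = np.zeros((48, 64, 3), dtype=np.uint8)         # placeholder frame
leaves = np.zeros((48, 64), dtype=np.int32)           # assumed per-pixel leaf labels
all_probs = np.zeros((48, 64, 21), dtype=np.float64)  # assumed per-pixel class probabilities
preds, integrals = convert_leaves_all_probs_pred(image, leaves, all_probs, num_leaves=256)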
Example 12

def __init__(self):
    self.labels = file_parse.load(os.environ['LOCAL_LABELS_FN'])