def prepare_chaincode_features(data, dictionary_name=None, bow_options_name=None, input_name=None, output_dir=None, **kwargs):
    assert bow_options_name is not None
    assert dictionary_name is not None
    assert input_name is not None
    assert output_dir is not None

    bow_options = tsh.deserialize(bow_options_name)

    dictionary_name = dictionary_name.format(OUT=output_dir)
    ret = { 'bow_options': bow_options, 'dictionary_name': dictionary_name }
    if os.path.exists(dictionary_name):
        ret.update(tsh.deserialize(dictionary_name))
        ret['feature_names'] = [ 's%02di%03d' % (scale, i) for scale in bow_options['scales'] for i in range(len(ret['dictionary'][scale])) ]
        return ret

    # Compute dictionary - for this we need the chaincodes
    chaincode_dtype = [ ('id', data.dtype['id']) ] + list(zip(
                [ 'cc%02d' % scale for scale in bow_options['scales'] ],
                ['O'] * len(bow_options['scales'])))
    chaincodes = np.zeros(len(data), dtype=chaincode_dtype)
    chaincodes['id'] = data['id']
    mask_prefix = os.path.expanduser(kwargs['mask_prefix'])
    for scale in bow_options['scales']:
        scale_name = 'cc%02d' % scale
        for i in range(len(data)):
            chaincodes[i][scale_name] = get_chaincode_from_image(os.path.join(mask_prefix, data[i]['mask']), scale)
    tsh.serialize(os.path.join(output_dir, input_name + '-chaincodes.dat'), chaincodes)
    lpname = os.path.join(output_dir, input_name + '-lp.dat')
    dictionary = dict(zip(
        ['dictionary', 'counts'],
        train_dictionary(chaincodes, lpname=lpname, labels=data[kwargs['truth']], **bow_options)))
    tsh.serialize(dictionary_name, dictionary)
    ret.update(dictionary)
    ret['feature_names'] = [ 's%02di%03d' % (scale, i) for scale in bow_options['scales'] for i in range(len(ret['dictionary'][scale])) ]
    return ret
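
A minimal usage sketch (not part of the original module): it assumes data is a structured NumPy array with 'id', 'mask' and a label field, that tsh is the project's serialization helper, and that all file names, keyword arguments and the label field below are purely illustrative.

import numpy as np

# Hypothetical data and paths, for illustration only.
data = np.array([(1, 'embryo01-mask.png', 'lateral'),
                 (2, 'embryo02-mask.png', 'dorsal')],
                dtype=[('id', int), ('mask', 'O'), ('label', 'O')])
features = prepare_chaincode_features(
    data,
    dictionary_name='{OUT}/chaincode-dictionary.dat',  # '{OUT}' is replaced by output_dir
    bow_options_name='bow-options.dat',                # serialized dict with at least a 'scales' key
    input_name='embryos',
    output_dir='out',
    mask_prefix='~/masks',
    truth='label')
print(features['feature_names'])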
Example #2
def prepare_chaincode_features(data,
                               dictionary_name=None,
                               bow_options_name=None,
                               input_name=None,
                               output_dir=None,
                               **kwargs):
    assert bow_options_name is not None
    assert dictionary_name is not None
    assert input_name is not None
    assert output_dir is not None

    bow_options = tsh.deserialize(bow_options_name)

    dictionary_name = dictionary_name.format(OUT=output_dir)
    ret = {'bow_options': bow_options, 'dictionary_name': dictionary_name}
    if os.path.exists(dictionary_name):
        ret.update(tsh.deserialize(dictionary_name))
        ret['feature_names'] = [
            's%02di%03d' % (scale, i) for scale in bow_options['scales']
            for i in range(len(ret['dictionary'][scale]))
        ]
        return ret

    # Compute dictionary - for this we need the chaincodes
    chaincode_dtype = [
        ('id', data.dtype['id'])
    ] + list(zip(['cc%02d' % scale for scale in bow_options['scales']],
                 ['O'] * len(bow_options['scales'])))
    chaincodes = np.zeros(len(data), dtype=chaincode_dtype)
    chaincodes['id'] = data['id']
    mask_prefix = os.path.expanduser(kwargs['mask_prefix'])
    for scale in bow_options['scales']:
        scale_name = 'cc%02d' % scale
        for i in range(len(data)):
            chaincodes[i][scale_name] = get_chaincode_from_image(
                os.path.join(mask_prefix, data[i]['mask']), scale)
    tsh.serialize(os.path.join(output_dir, input_name + '-chaincodes.dat'),
                  chaincodes)
    lpname = os.path.join(output_dir, input_name + '-lp.dat')
    dictionary = dict(
        zip(['dictionary', 'counts'],
            train_dictionary(chaincodes,
                             lpname=lpname,
                             labels=data[kwargs['truth']],
                             **bow_options)))
    tsh.serialize(dictionary_name, dictionary)
    ret.update(dictionary)
    ret['feature_names'] = [
        's%02di%03d' % (scale, i) for scale in bow_options['scales']
        for i in range(len(ret['dictionary'][scale]))
    ]
    return ret
Example #3
def read_argsfile(filename):
    """
    Reads in arguments from file.

    Parameters
    ----------
    filename: string
        Input filename.
    """
    return tsh.deserialize(filename)
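
A trivial usage sketch; the path is hypothetical and the return value is whatever object was serialized into the file (typically a dict of arguments).

args = read_argsfile('out/embryos-args.dat')  # hypothetical path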
def extract_expression(image_file, mask_file, inside_file, expression_file,
        data_file, normalized_width, normalized_height):
    '''
    Extract the normalized expression image from image_file using mask_file,
    caching the result in data_file. When inside_file or expression_file is
    given, the corresponding intermediate image is also written out.
    '''
    if os.path.exists(data_file):
        return tsh.deserialize(data_file)
    image = tsh.read_gray_image(image_file)
    mask = tsh.read_gray_image(mask_file)
    inside, expression = tsh.extract_embryo_gray(image, mask > 0, normalized_width, normalized_height)
    if inside_file is not None:
        cv.imwrite(inside_file, inside)
    if expression_file is not None:
        cv.imwrite(expression_file, 255*expression)
    tsh.serialize(data_file, expression)
    return expression
def extract_expression(image_file, mask_file, inside_file, expression_file,
                       data_file, normalized_width, normalized_height):
    '''
    Extract the normalized expression image from image_file using mask_file,
    caching the result in data_file. When inside_file or expression_file is
    given, the corresponding intermediate image is also written out.
    '''
    if os.path.exists(data_file):
        return tsh.deserialize(data_file)
    image = tsh.read_gray_image(image_file)
    mask = tsh.read_gray_image(mask_file)
    inside, expression = tsh.extract_embryo_gray(image, mask > 0,
                                                 normalized_width,
                                                 normalized_height)
    if inside_file is not None:
        cv.imwrite(inside_file, inside)
    if expression_file is not None:
        cv.imwrite(expression_file, 255 * expression)
    tsh.serialize(data_file, expression)
    return expression
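
A usage sketch with illustrative file names and an arbitrary normalized size; because the result is cached in data_file, a second call with the same data_file simply deserializes the stored expression.

# Hypothetical paths; pass None for inside_file/expression_file to skip the intermediate images.
expr = extract_expression('embryo01.png', 'embryo01-mask.png',
                          'embryo01-inside.png', 'embryo01-expr.png',
                          'embryo01-expr.dat', 320, 160)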
def is_blurred(fine_segmentation, dirname, ids):
    # Threshold on the blur measure below which a sample is considered blurred.
    blur_threshold = 1.46128831851
    data = tsh.deserialize(os.path.join(fine_segmentation, dirname, 'blur-evaluation.dat'))
    return [ data['data'][data['data']['obj'] == int(metasys_id)][0][data['blur_measure']] < blur_threshold
             for metasys_id in ids ]
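
An illustrative call, assuming the blur-evaluation.dat layout read above; the directory names and ids are made up.

blurred = is_blurred('segmentation-fine', 'plate01', ['101', '102'])  # list of booleans, one per id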
def _get_dissimilarity(i, j, measure, rotation_invariance, data_filei,
                       data_filej):
    expri = tsh.deserialize(data_filei).astype(float)
    exprj = tsh.deserialize(data_filej).astype(float)
    d, t = image_distance(expri, exprj, measure, rotation_invariance)
    return i, j, d, t
def get_dissimilarities(data,
                        output_dir=None,
                        input_name=None,
                        image_prefix=None,
                        mask_prefix=None,
                        n_jobs=None,
                        **kwargs):
    assert image_prefix is not None
    assert mask_prefix is not None
    assert output_dir is not None
    assert input_name is not None
    measure = kwargs['measure']
    distance_name = kwargs['distance_name']
    rotation_invariance = kwargs['rotation_invariance']
    normalized_width = kwargs['normalized_width']
    normalized_height = kwargs['normalized_height']
    if n_jobs is None:
        n_jobs = 1

    image_prefix = os.path.expanduser(image_prefix)
    mask_prefix = os.path.expanduser(mask_prefix)
    distance_name = distance_name.format(OUT=output_dir,
                                         INPUTNAME=input_name,
                                         **kwargs)
    tsh.makedirs(os.path.dirname(distance_name))
    kwargs['distance_name'] = distance_name

    expr_dir = os.path.join(
        output_dir, 'expr/%04dx%04d' % (normalized_width, normalized_height))
    tsh.makedirs(expr_dir)

    # Make it easier for evaluate.py to create nice html reports.
    if kwargs.get('create_links', False):
        # lexists also catches dangling symlinks left over from a previous run.
        if os.path.lexists('expr'):
            os.unlink('expr')
        try:
            os.symlink(expr_dir, 'expr')
        except OSError:
            pass
        if os.path.lexists('distance'):
            os.unlink('distance')
        try:
            os.symlink(os.path.dirname(distance_name), 'distance')
        except OSError:
            pass

    save_expr_images = kwargs.get('save_expr_images', False)
    if os.path.exists(distance_name):
        D = tsh.deserialize(distance_name)['D']
    else:
        imagenames = [
            os.path.join(image_prefix, sample['image']) for sample in data
        ]
        masknames = [
            os.path.join(mask_prefix, sample['mask']) for sample in data
        ]
        n = len(data)
        logger.info('Extracting %d expressions...', n)
        Parallel(n_jobs=n_jobs, verbose=True, pre_dispatch='2*n_jobs')(
            delayed(_extract_expression)
            (imagenames[j], masknames[j],
             os.path.join(expr_dir, 'inside%02d.png' %
                          data[j]['id']) if save_expr_images else None,
             os.path.join(expr_dir, 'expr%02d.png' %
                          data[j]['id']) if save_expr_images else None,
             os.path.join(expr_dir, 'expr%02d.dat' %
                          data[j]['id']), normalized_width, normalized_height)
            for j in range(n))

        logger.info('Computing %d dissimilarities...', (n * (n - 1)) // 2)
        results = Parallel(
            n_jobs=n_jobs, verbose=True,
            pre_dispatch='2*n_jobs')(delayed(_get_dissimilarity)(
                i, j, measure, rotation_invariance,
                os.path.join(expr_dir, 'expr%02d.dat' % data[i]['id']),
                os.path.join(expr_dir, 'expr%02d.dat' % data[j]['id']))
                                     for j in range(n)
                                     for i in range(j + 1, n))

        logger.info('Transforming results...')
        D = np.zeros((n, n), dtype=float)
        tfxs = np.array([['I'] * n] * n)
        for i, j, d, t in results:
            D[j, i] = d
            D[i, j] = d
            tfxs[j, i] = t
            tfxs[i, j] = t

        logger.info('Saving results...')
        tsh.serialize(
            distance_name, {
                'D': D,
                'min': None,
                'max': None,
                'tfxs': tfxs,
                'measure': measure,
                'rotation_invariance': rotation_invariance
            })

    return kwargs, D
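
A sketch of a call to get_dissimilarities, assuming data is a structured array that also carries 'image' and 'mask' fields; the measure, the placeholders in distance_name and the normalized size are illustrative, and the trailing keyword arguments are passed through via **kwargs.

args, D = get_dissimilarities(
    data,
    output_dir='out',
    input_name='embryos',
    image_prefix='~/images',
    mask_prefix='~/masks',
    n_jobs=4,
    measure='ncc',                                             # illustrative distance measure
    distance_name='{OUT}/distance/{INPUTNAME}-{measure}.dat',  # placeholders filled by str.format
    rotation_invariance=False,
    normalized_width=320,
    normalized_height=160)
# D is a symmetric n-by-n matrix of pairwise expression distances.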
def _get_dissimilarity(i, j, measure, rotation_invariance, data_filei, data_filej):
    expri = tsh.deserialize(data_filei).astype(float)
    exprj = tsh.deserialize(data_filej).astype(float)
    d, t = image_distance(expri, exprj, measure, rotation_invariance)
    return i, j, d, t
def get_dissimilarities(data, output_dir=None, input_name=None, image_prefix=None, mask_prefix=None, n_jobs=None, **kwargs):
    assert image_prefix is not None
    assert mask_prefix is not None
    assert output_dir is not None
    assert input_name is not None
    measure = kwargs['measure']
    distance_name = kwargs['distance_name']
    rotation_invariance = kwargs['rotation_invariance']
    normalized_width = kwargs['normalized_width']
    normalized_height = kwargs['normalized_height']
    if n_jobs is None:
        n_jobs = 1

    image_prefix = os.path.expanduser(image_prefix)
    mask_prefix = os.path.expanduser(mask_prefix)
    distance_name = distance_name.format(OUT=output_dir, INPUTNAME=input_name, **kwargs)
    tsh.makedirs(os.path.dirname(distance_name))
    kwargs['distance_name'] = distance_name

    expr_dir = os.path.join(output_dir, 'expr/%04dx%04d' % (normalized_width, normalized_height))
    tsh.makedirs(expr_dir)

    # Make it easier for evaluate.py to create nice html reports.
    if kwargs.get('create_links', False):
        # lexists also catches dangling symlinks left over from a previous run.
        if os.path.lexists('expr'):
            os.unlink('expr')
        try:
            os.symlink(expr_dir, 'expr')
        except OSError:
            pass
        if os.path.lexists('distance'):
            os.unlink('distance')
        try:
            os.symlink(os.path.dirname(distance_name), 'distance')
        except OSError:
            pass

    save_expr_images = kwargs.get('save_expr_images', False)
    if os.path.exists(distance_name):
        D = tsh.deserialize(distance_name)['D']
    else:
        imagenames = [ os.path.join(image_prefix, sample['image']) for sample in data ]
        masknames = [ os.path.join(mask_prefix, sample['mask']) for sample in data ]
        n = len(data)
        logger.info('Extracting %d expressions...', n)
        Parallel(n_jobs=n_jobs, verbose=True,
            pre_dispatch='2*n_jobs')(
            delayed(_extract_expression)(
                imagenames[j],
                masknames[j],
                os.path.join(expr_dir, 'inside%02d.png' % data[j]['id']) if save_expr_images else None,
                os.path.join(expr_dir, 'expr%02d.png' % data[j]['id']) if save_expr_images else None,
                os.path.join(expr_dir, 'expr%02d.dat' % data[j]['id']),
                normalized_width,
                normalized_height
            ) for j in range(n))

        logger.info('Computing %d dissimilarities...', (n*(n-1))//2)
        results = Parallel(n_jobs=n_jobs, verbose=True,
            pre_dispatch='2*n_jobs')(
            delayed(_get_dissimilarity)(
                i, j,
                measure, rotation_invariance,
                os.path.join(expr_dir, 'expr%02d.dat' % data[i]['id']),
                os.path.join(expr_dir, 'expr%02d.dat' % data[j]['id'])
            ) for j in range(n) for i in range(j+1, n))

        logger.info('Transforming results...')
        D = np.zeros((n, n), dtype=float)
        tfxs = np.array([['I'] * n] * n)
        for i, j, d, t in results:
            D[j, i] = d
            D[i, j] = d
            tfxs[j, i] = t
            tfxs[i, j] = t

        logger.info('Saving results...')
        tsh.serialize(distance_name, {
                'D': D,
                'min': None, 'max': None,
                'tfxs': tfxs,
                'measure': measure,
                'rotation_invariance': rotation_invariance })

    return kwargs, D
Example #11
def read_propagatorfile(filename):
    return tsh.deserialize(filename)
Example #12
def read_classifierfile(filename):
    return tsh.deserialize(filename)