Code example #1
File: utils.py  Project: iPhaeton/supervisors
def mean_distances(embeddings, labels, metric, normalized):
    """Mean within-class (positive) and between-class (negative) pairwise
    distances, plus the means of each row's hardest positive and hardest
    negative distances."""
    if normalized:
        embeddings = l2_normalized(embeddings)

    labels = array_ops.reshape(labels, [array_ops.shape(labels)[0], 1])

    dist_matrix = metric(embeddings)
    # adjacency[i, j] is True where labels[i] == labels[j] (positive pairs).
    adjacency = compose(
        tf_equal(labels),
        array_ops.transpose,
    )(labels)
    adjacency_not = compose(
        tf_cast(dtype=dtypes.float32),
        math_ops.logical_not,
    )(adjacency)
    adjacency = math_ops.cast(adjacency, dtype=dtypes.float32)

    pdist_matrix = tf.multiply(dist_matrix, adjacency)
    _ndist_matrix = tf.multiply(dist_matrix, adjacency_not)
    # Shift positive-pair entries above each row's maximum distance so that
    # reduce_min over ndist_matrix always lands on a true negative pair.
    ndist_matrix = compose(
        tf_add(_ndist_matrix),
        tf_multiply(adjacency),
        tf_add(tf.reduce_max(dist_matrix, axis=1, keepdims=True)),
    )(pdist_matrix)

    positive_mean_distance = math_ops.reduce_mean(pdist_matrix)
    negative_mean_distance = math_ops.reduce_mean(_ndist_matrix)
    hardest_positive_dist = tf.reduce_max(pdist_matrix, axis=1, keepdims=True)
    hardest_mean_positive_distance = tf.reduce_mean(hardest_positive_dist)
    hardest_negative_dist = tf.reduce_min(ndist_matrix, axis=1, keepdims=True)
    hardest_mean_negative_distance = tf.reduce_mean(hardest_negative_dist)

    return positive_mean_distance, negative_mean_distance, hardest_mean_positive_distance, hardest_mean_negative_distance
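The tf_equal, tf_cast, tf_add, and tf_multiply helpers are not TensorFlow APIs and are not included in these listings; judging by how they are used inside compose, they are presumably curried wrappers around the corresponding ops. A minimal sketch, assuming pyramda's curry (the project's actual wrappers may differ):

# Hypothetical wrappers -- the project's real definitions are not shown.
import tensorflow as tf
from pyramda import curry

# The snippet above also relies on TensorFlow's internal modules, typically:
# from tensorflow.python.framework import dtypes
# from tensorflow.python.ops import array_ops, math_ops

@curry
def tf_equal(x, y):
    return tf.equal(x, y)

@curry
def tf_add(x, y):
    return tf.add(x, y)

@curry
def tf_multiply(x, y):
    return tf.multiply(x, y)

def tf_cast(dtype):
    return lambda x: tf.cast(x, dtype=dtype)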
Code example #2
def is_valid_user_email_and_password(user_email, password):
    # Look up the user's auth data inside a DB session; a falsy result
    # means no such user.
    maybe_user_auth_data = with_session(compose(
        maybe_dump_model,
        maybe_user_auth(user_email)
    ))
    return maybe_user_auth_data and verify(
        password,
        maybe_user_auth_data["password"]
    )
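with_session recurs throughout these examples but its definition is not included. A minimal sketch of how such a helper could look with SQLAlchemy, purely as an assumption about the missing code:

# Hypothetical sketch -- the real with_session is not shown in these listings.
from sqlalchemy.orm import sessionmaker

Session = sessionmaker()  # assumed to be bound to the app's engine elsewhere

def with_session(f):
    """Open a session, pass it to f, commit on success, roll back on error."""
    session = Session()
    try:
        result = f(session)
        session.commit()
        return result
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()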
Code example #3
File: metrics.py  Project: iPhaeton/supervisors
def cosine_distance(embeddings, evaluate=None):
    """
    Compute the cosine distance matrix.

    Parameters:
    -----------
    - embeddings: Tensor(N, E)
        Image embeddings, outputs of the convolutional network.
        N - number of samples (None)
        E - embedding size

    Returns:
    --------
    Tensor(N, N) of pairwise cosine distances with a zeroed diagonal.
    """
    embeddings = tf.divide(embeddings,
                           tf.norm(embeddings, axis=1, keepdims=True))

    # |eye(N) - 1| is 0 on the diagonal and 1 elsewhere, so multiplying by
    # it zeroes each sample's distance to itself.
    diag_mask = compose(
        tf.abs,
        partial(tf.add, -1.),
    )(tf.eye(num_rows=tf.shape(embeddings)[0]))

    return compose(
        partial(tf.multiply, diag_mask),
        partial(tf.subtract, 1.),
        partial(tf.matmul, embeddings),
    )(tf.transpose(embeddings))
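Read right to left, the final composition unrolls to the following equivalent imperative form (same tensors, no compose):

similarity = tf.matmul(embeddings, tf.transpose(embeddings))  # cosine similarity
distance = tf.subtract(1., similarity)                        # cosine distance
distance = tf.multiply(diag_mask, distance)                   # zero the diagonal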
Code example #4
File: create.py  Project: pombredanne/grokker
def create_model(json):
    name = json["name"]
    type_name = json["type"]
    datasource_id = json["datasource"]
    input_columns = json["input-columns"]
    # getattr here appears to be a project-level curried accessor (it is
    # called with only a field name), not the Python builtin.
    type_id = with_session(compose(
        getattr("id"),
        query_type_by_name(type_name)
    ))
    new_model_id = with_session(create_row(ModelModel, {
        "name": name,
        "type_id": type_id,
        "datasource_id": datasource_id
    }))["id"]
    with_session(create_input_columns(input_columns, new_model_id))
    queue_model_creation_task(json)
    return get_model(new_model_id)
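The expected payload shape can be read off the key accesses above; a hypothetical example call (all values made up):

create_model({
    "name": "my-model",                   # hypothetical
    "type": "recommender",                # resolved to a type_id by name
    "datasource": 7,                      # id of an existing datasource
    "input-columns": ["col_a", "col_b"],
})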
Code example #5
File: proxy.py  Project: jackfirth/docker-auth
def proxy_route(app):
    # Compose the two decorators so a single view function is registered
    # for both the root URL and every sub-path.
    return compose(
        app.route('/', defaults={'path': ''}, methods=allowed_methods),
        app.route('/<path:path>', methods=allowed_methods)
    )
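A hypothetical usage, assuming a Flask app; the forward helper is a placeholder, not part of the listing:

@proxy_route(app)
def proxy(path):
    # forward is hypothetical; the real view body is not shown.
    return forward(path)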
Code example #6
                                maybe_none_resolver)

# Fold BeautifulSoup lookups over the nested tree: table rows, then cells,
# then text, links, and link attributes (the mn.* calls appear to act on
# maybe-wrapped nodes, me.* on plain elements).
data = cata(tagfn(me.find_all('tr')), at3)
data2 = cata(tagfn(me.find_all('td')), data)
data_texts = cata(tagfn(mn.get_text()), data2)
data3 = cata(tagfn(mn.find('a')), data2)
data_titles = cata(tagfn(mn.get('title')), data3)
data_links = cata(tagfn(mn.get('href')), data3)
#data5 = cata(tagfn(maybe_none_resolver),data4)

# Unwrap Maybe-wrapped leaves back to plain values.
uw_titles = cata(lambda x: x.value
                 if isinstance(x, md.Maybe) else x, data_titles)
uw_texts = cata(lambda x: x.value
                if isinstance(x, md.Maybe) else x, data_texts)
w_titles = cata(
    r.if_else(r.isinstance(list), r.identity,
              r.compose(md.First, maybe_none_resolver)), uw_titles)
w_texts = cata(r.if_else(r.isinstance(list), r.identity, maybe_none_resolver),
               uw_texts)

out = cata(
    r.if_else(
        lambda x: isinstance(x, tuple) and isinstance(x[0], md.Container),
        r.apply(r.add), r.identity), (w_titles, w_texts))

out2 = cata(
    r.if_else(r.isinstance(md.First), lambda x: x.value.value, r.identity),
    out)
out3 = cata(r.if_else(r.isinstance(md.Just), lambda x: x.value, r.identity),
            out2)
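cata is not defined in this listing. From the call sites it behaves like a catamorphism over nested lists: recurse into the children first, then apply the function at every node, which is why the r.if_else(r.isinstance(list), r.identity, ...) guards above are needed. A sketch under that assumption:

# Hypothetical sketch of cata as used above; the real definition is not shown.
def cata(f, node):
    if isinstance(node, list):
        # Fold bottom-up: transform the children, then the rebuilt node itself.
        return f([cata(f, child) for child in node])
    return f(node)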
Code example #7
File: get.py  Project: pombredanne/grokker
def get_recommender(id):
    return with_session(compose(
        recommender_model_to_dict,
        query_id(RecommenderModel, id)
    ))
Code example #8
File: get.py  Project: pombredanne/grokker
def get_model(id):
    return with_session(compose(
        model_model_to_dict,
        query_id(ModelModel, id)
    ))
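Examples #7 and #8, and example #10 below, are the same pattern with different models. query_id is not shown; combined with the with_session sketch above, it would plausibly be a curried primary-key lookup (an assumption, not the project's actual code):

# Hypothetical sketch -- the real query_id is not shown in these listings.
from pyramda import curry

@curry
def query_id(model_class, id, session):
    # Return the row with the given primary key (or None if absent).
    return session.query(model_class).get(id)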
Code example #9
File: jobs.py  Project: iPhaeton/supervisors
def siamese_job(source_path, model_loader, **kwargs):
    loss_fn = kwargs.pop('loss_fn', None)
    batch_size = kwargs.pop('batch_size', None)
    num_per_class = kwargs.pop('num_per_class', 5)
    lr = kwargs.pop('lr', 1e-3)
    normalized = kwargs.pop('normalized', True)
    num_classes = kwargs.pop('num_classes')
    normalized_input = kwargs.pop('normalized_input')

    tf.reset_default_graph()

    # List the class directories, dropping macOS .DS_Store entries
    # (see filter_list in example #12).
    dirs = compose(
        filter_list(['.DS_Store'], False),
        os.listdir,
    )(source_path)

    labels = classes_to_labels(dirs)
    train_dirs, val_dirs, train_labels, val_labels = train_test_split(
        dirs, labels, test_size=0.1)

    if num_classes is not None:
        train_dirs = train_dirs[0:num_classes]
        train_labels = train_labels[0:num_classes]
        val_dirs = val_dirs[0:num_classes]
        val_labels = val_labels[0:num_classes]

    session = tf.Session()
    inputs, outputs, is_pretrained = model_loader(session)
    labels = tf.placeholder(name='labels', dtype=tf.int32, shape=(None, ))

    eval_samples, eval_labels = load_batch_of_images(
        path=source_path,
        num_per_class=4,
        image_shape=(128, 64, 3),
        loader=cv2_loader,
        dirs=train_dirs[0:3],
        labels=train_labels[0:3],
        batch_size=None,
        normalize=normalized_input,
    )
    ctx['evaluator'].initialize(session=session,
                                feed_dict={
                                    inputs: eval_samples,
                                    labels: eval_labels,
                                })

    model = create_siamese_graph(session=session,
                                 base_model=[inputs, outputs, labels],
                                 optimizer=tf.train.AdamOptimizer,
                                 loss_fn=loss_fn,
                                 is_pretrained=is_pretrained,
                                 normalized=normalized)

    train_siamese_model(
        session=session,
        model=model,
        dirs=[train_dirs, val_dirs],
        labels=[train_labels, val_labels],
        initial_lr=lr,
        batch_generator=batch_of_images_generator(
            path=source_path,
            dirs=train_dirs,
            labels=train_labels,
            num_per_class=num_per_class,
            batch_size=batch_size,
            image_shape=(128, 64, 3),
            loader=cv2_loader,
            shuffle=True,
            normalize=normalized_input,
        ),
        batch_loader=partial(
            load_batch_of_images,
            path=source_path,
            num_per_class=num_per_class,
            image_shape=(128, 64, 3),
            loader=cv2_loader,
            normalize=normalized_input,
        ),
        is_pretrained=is_pretrained,
        batch_size=batch_size,
        **kwargs,
    )
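A hypothetical invocation, with placeholder names for the loader and loss; only the keyword names are taken from the code above:

siamese_job(
    source_path='data/train',       # hypothetical path
    model_loader=load_base_model,   # hypothetical; must return (inputs, outputs, is_pretrained)
    loss_fn=triplet_loss,           # hypothetical loss
    batch_size=64,
    num_per_class=5,
    lr=1e-3,
    normalized=True,
    num_classes=None,
    normalized_input=True,
)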
Code example #10
File: get.py  Project: pombredanne/grokker
def get_datasource(id):
    return with_session(compose(
        datasource_model_to_dict,
        query_id(DatasourceModel, id)
    ))
Code example #11
File: config.py  Project: jackfirth/docker-auth
)

bool_string_environ = pred_environ(
    is_bool_string,
    bool_string_to_bool,
    "either 'true' or 'false'"
)

hash_alg_environ = pred_environ(
    is_hash_alg,
    identity,
    "one of {0}".format(allowed_hash_algorithms)
)

default_hash_alg_environ = compose(
    hash_alg_environ,
    default_environ(default_hash_algoirthm)
)

default_hash_rounds_environ = compose(
    int_string_environ,
    default_environ("40000")
)

min_password_length_environ = compose(
    int_string_environ,
    default_environ("12")
)

password_char_set_environ = default_environ(None)

TARGET_SERVICE_HOST = required_environ("TARGET_SERVICE_HOST").value
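pred_environ, default_environ, and required_environ are not part of this listing. A rough sketch of what pred_environ plausibly does, judging from the call sites above (an assumption, not docker-auth's actual code):

# Hypothetical sketch -- the real helper is not shown.
def pred_environ(predicate, convert, description):
    def reader(value):
        # Validate the raw environment value, then convert it.
        if not predicate(value):
            raise ValueError("expected {0}, got {1!r}".format(description, value))
        return convert(value)
    return reader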
Code example #12
File: common.py  Project: iPhaeton/supervisors
from pyramda import compose


# jobs.py (example #9) calls this with only two arguments, so the project
# presumably curries it (e.g. with pyramda's curry); the decorator is not
# shown in this listing.
def filter_list(refs, condition, l):
    """Keep (condition=True) or drop (condition=False) the items of l that
    appear in refs."""
    if condition:
        return list(filter(lambda x: x in refs, l))
    else:
        return list(filter(lambda x: x not in refs, l))


# Map a list of class names to integer labels [0, 1, ..., len - 1];
# compose applies right to left, i.e. list(range(len(classes))).
classes_to_labels = compose(
    list,
    range,
    len,
)
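A quick sanity check of both helpers:

assert filter_list(['.DS_Store'], False, ['.DS_Store', 'a', 'b']) == ['a', 'b']
assert filter_list(['a'], True, ['.DS_Store', 'a', 'b']) == ['a']
assert classes_to_labels(['cat', 'dog', 'bird']) == [0, 1, 2]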