Example #1
    def wrapped(instance, *v, **k):
        # Inner wrapper of a timing decorator: `fun` and `self.message`
        # are supplied by the enclosing decorator object.
        started = time.time()
        try:
            return fun(instance, *v, **k)
        finally:
            elapsed = time.time() - started
            Logger.create(instance).debug(
                    '%s in %.2fs.', self.message, elapsed)
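Only the inner wrapper is shown above; the enclosing decorator is not part of the example. A minimal sketch of what such a decorator could look like, assuming a hypothetical Measured class whose message attribute feeds the log line:

import functools
import time

from classify.util.logger import Logger


class Measured:
    """Hypothetical timing decorator; only the wrapped function above
    appears in the original example."""

    def __init__(self, message):
        self.message = message

    def __call__(self, fun):
        @functools.wraps(fun)
        def wrapped(instance, *v, **k):
            started = time.time()
            try:
                return fun(instance, *v, **k)
            finally:
                elapsed = time.time() - started
                Logger.create(instance).debug(
                        '%s in %.2fs.', self.message, elapsed)
        return wrapped

A method decorated with @Measured('Trained model') would then log its own duration on every call.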
Example #2
    def __init__(self, indexer, params, save_path=DEFAULT_PATH):
        self.log = Logger.create(self)
        self.max_length = params.max_length
        self.batch_size = params.batch_size
        self.num_hidden = params.num_hidden
        self.keep_prob = params.keep_prob
        self.num_layers = params.num_layers
        self.epoch = params.epoch
        self.error = params.error
        self.save_path = save_path
        self.vector_dims = indexer.dimensions

        self.session = tf.Session(graph=tf.Graph())
        self.graph = self.reuse_graph()
        self.lookup = Lookup(indexer, self.max_length)
Example #3
    def __init__(self, size, evict_action):
        self.log = Logger.create(self)
        self.size = size
        self.cache = collections.OrderedDict()
        self.action = evict_action
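The example stops at the constructor, which points at a bounded cache built on collections.OrderedDict with an eviction callback. How insertions and evictions might work is sketched below; the put method and its exact behaviour are assumptions, not part of the original code:

    def put(self, key, value):
        # Assumed insertion logic: keep the most recently used entries,
        # evict the oldest one once `size` is exceeded, and hand the
        # evicted value to the configured callback.
        self.cache[key] = value
        self.cache.move_to_end(key)
        if len(self.cache) > self.size:
            evicted_key, evicted_value = self.cache.popitem(last=False)
            self.action(evicted_value)
            self.log.debug('Evicted %s from the cache.', evicted_key)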
Example #4
    def __init__(self):
        self.log = Logger.create(self)
        self.dictionary = {}
        self.vectors = []
        self.dimensions = None
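The constructor above suggests a word-to-vector index: a dictionary from words to positions, a list of vectors, and a dimensionality learned from the data. How entries get added is not shown; one possible shape, with the add method purely as an assumption:

    def add(self, word, vector):
        # Assumed insertion: infer dimensionality from the first vector,
        # then map the word to the next free row.
        if self.dimensions is None:
            self.dimensions = len(vector)
        self.dictionary[word] = len(self.vectors)
        self.vectors.append(vector)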
Example #5
import argparse
import logging

from classify.indexer import Indexer
from classify.loader import Loader
from classify.params import Params
from classify.model import Model
from classify.util.logger import Logger


def evaluation_result(prediction):
    return '{:.1f}% negative, {:.1f}% positive.'.format(
            *(round(p * 100) for p in prediction))


Logger.initialize(logging.DEBUG)
parser = argparse.ArgumentParser('runner')

parser.add_argument('-t', '--train',
                    dest='train',
                    metavar='TRAINING_SET',
                    action='store',
                    help='retrain the model using the train set')

parser.add_argument('-r', '--representations',
                    dest='representations',
                    action='store',
                    help='file with vector representations of words',
                    required=True)

parser.add_argument('-s', '--sentences',
Example #6
                    required=True)

parser.add_argument('-c', '--cache-size',
                    dest='cache_size',
                    action='store',
                    help='model cache size',
                    type=int,
                    default=3)

args = parser.parse_args()

app = Flask(__name__)
provider = Provider(
        args.representations, args.model_directory, args.cache_size)

Logger.initialize(logging.DEBUG)
log = Logger.create_with_name('Server')


@app.errorhandler(Exception)
def errorhandler(error):
    logging.exception('Error when processing query.')
    return failure(500, str(error))


def failure(code, message):
    return jsonify(message), code


def success(message='OK'):
    return jsonify(message), 200
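The fragment ends with the error and response helpers but before any routes. A route using them might look like the sketch below; the /classify endpoint, its request format, and the echoed response are assumptions made for illustration only:

from flask import request


@app.route('/classify', methods=['POST'])
def classify():
    # Hypothetical endpoint: validate the input and respond through the
    # success/failure helpers defined above.
    payload = request.get_json(silent=True)
    if not payload or 'sentence' not in payload:
        return failure(400, 'Missing "sentence" in the request body.')
    return success('Received: {}'.format(payload['sentence']))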
Example #7
    def __init__(self, indexer):
        self.log = Logger.create(self)
        self.indexer = indexer
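Every example goes through classify.util.logger.Logger, whose implementation is not shown. A minimal sketch that is consistent with the calls used above (Logger.initialize, Logger.create, Logger.create_with_name), assuming it is a thin wrapper around the standard logging module:

import logging
import sys


class Logger:
    """Sketch of the logging helper assumed by the examples; the real
    classify.util.logger.Logger may differ in format and handlers."""

    FORMAT = '%(asctime)s %(name)s %(levelname)s: %(message)s'

    @staticmethod
    def initialize(level):
        # Configure the root logger once for the whole process.
        logging.basicConfig(stream=sys.stdout, level=level,
                            format=Logger.FORMAT)

    @staticmethod
    def create(instance):
        # Name the logger after the class of the calling instance.
        return Logger.create_with_name(type(instance).__name__)

    @staticmethod
    def create_with_name(name):
        return logging.getLogger(name)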