def score(msg): ret = {} msg = decode_netstring_fd(cStringIO.StringIO(msg)) #cueset_size, cues = ainodex.expand_cueset( # map(int, msg['cues'].split())) cueset_size, cues = ainodex.hits(map(int, msg['cues'].split()), 0) cueset_size, cues = filter_hits(cueset_size, cues,\ prior_check = True, site_check = True) ret['cueset_size'] = str(cueset_size) ok_layers = [i for i, maxf in enumerate(LAYERS) if min(maxf, cueset_size) / float(max(maxf, cueset_size)) > MIN_SCORE] if len(LAYERS) - 1 in ok_layers: ok_layers.append(len(LAYERS)) print "OK", ok_layers, "CUES", cueset_size t = time.time() for i in ok_layers: layer = ainodex.new_layer(i, cues) ret[str(i)] = ainodex.serialize_layer(layer) erlay.report("Scoring <%s> took %dms" % (msg['cues'], (time.time() - t) * 1000.0)) return encode_netstring_fd(ret)
def score(msg): ret = {} msg = decode_netstring_fd(cStringIO.StringIO(msg)) #cueset_size, cues = ainodex.expand_cueset( # map(int, msg['cues'].split())) cueset_size, cues = ainodex.hits(map(int, msg['cues'].split()), 0) cueset_size, cues = filter_hits(cueset_size, cues,\ prior_check = True, site_check = True) ret['cueset_size'] = str(cueset_size) ok_layers = [ i for i, maxf in enumerate(LAYERS) if min(maxf, cueset_size) / float(max(maxf, cueset_size)) > MIN_SCORE ] if len(LAYERS) - 1 in ok_layers: ok_layers.append(len(LAYERS)) print "OK", ok_layers, "CUES", cueset_size t = time.time() for i in ok_layers: layer = ainodex.new_layer(i, cues) ret[str(i)] = ainodex.serialize_layer(layer) erlay.report("Scoring <%s> took %dms" % (msg['cues'], (time.time() - t) * 1000.0)) return encode_netstring_fd(ret)
def merge_scores(msg):
    """Merge per-iblock score messages into one combined, normalized reply.

    *msg* is a concatenation of netstring-encoded dicts (one per
    iblock), each carrying a 'cueset_size' entry plus serialized
    layers. Returns a single netstring-encoded dict mapping layer
    index (as a string) to the merged, normalized serialized layer.
    """
    msg = cStringIO.StringIO(msg)
    # Merge target: one slot per layer. NOTE(review): assumes at most
    # 10 layers — TODO confirm against LAYERS elsewhere in the project.
    layers = [None] * 10
    cueset_size = 0
    # Decode one netstring dict per iblock until the buffer is exhausted.
    while True:
        try:
            iblock_layers = decode_netstring_fd(msg)
        except EOFError:
            break
        # Accumulate the total cue-set size across iblocks; the entry is
        # removed so the remaining values are all layer payloads.
        cueset_size += int(iblock_layers['cueset_size'])
        del iblock_layers['cueset_size']
        for layer_data in iblock_layers.itervalues():
            # Presumably deserialize_layer merges layer_data into
            # `layers` in place — the unpacked return values are unused
            # here. TODO(review): confirm, or drop the unpacking.
            offs, layer_id, layer =\
                ainodex.deserialize_layer(
                    layer_data, layers)
    #XXX: Since ixemes are allocated on different layers on each layer,
    # we must make sure that the ixeme counts match on every layer. This
    # could be easily avoided if ixemes were on the same layers on all
    # iblocks. This should be easy to fix.
    t = time.time()
    ainodex.sync_layers(layers)
    erlay.report("Syncing layers took %dms" %\
                 ((time.time() - t) * 1000.0))
    print "CUE", type(cueset_size), cueset_size
    # Normalize only the slots that were actually populated.
    for layer in layers:
        if layer:
            ainodex.normalize_layer(layer, normtable, cueset_size)
    # Serialize populated layers keyed by their stringified index.
    layers = [(str(i), ainodex.serialize_layer(layer))
              for i, layer in enumerate(layers) if layer]
    return encode_netstring_fd(dict(layers))