Example #1
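
# The options below rely on `sys`, `argparse`, and a `positive` argparse type
# that this snippet does not show (`UF` and `ParallelTextClassifier` likewise
# come from the surrounding project). A minimal sketch, assuming `positive`
# only needs to validate a positive integer:
import sys
import argparse

def positive(value):
    # Reject non-positive values with a clean argparse error message.
    ivalue = int(value)
    if ivalue <= 0:
        raise argparse.ArgumentTypeError("%s is not a positive integer" % value)
    return ivalue

parser = argparse.ArgumentParser()  # assumed: the snippet creates its parser before the options below
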
# Options
parser.add_argument("--batch", type=positive, default=512, help="Number of source word in the batch.")
parser.add_argument("--src", type=str, help="Specify this to do batched decoding, it has a priority than stdin.")
parser.add_argument("--use_cpu", action="store_true")
parser.add_argument("--gpu", type=int, default=-1, help="Which GPU to use (Negative for cpu).")
parser.add_argument("--verbose", action="store_true")
args  = parser.parse_args()

""" Sanity Check """
if args.use_cpu:
    args.gpu = -1
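
# A minimal sketch of the input selection described by --src above, assuming
# only that the file (when given) holds one source sentence per line and that
# stdin is the fallback. The real script's data loading is not shown here.
if args.src is not None:
    with open(args.src) as src_fp:
        sentences = [line.strip() for line in src_fp]
else:
    sentences = [line.strip() for line in sys.stdin]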

# Loading model
UF.trace("Setting up classifier")
model    = ParallelTextClassifier(args, use_gpu=args.gpu, collect_output=True)
SRC, TRG = model.get_vocabularies()

# Testing callbacks
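# Write the running sentence index, the source sentence, and the predicted tag
# for every (sentence, result) pair in a decoded batch.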
def print_result(ctr, trg, TRG, src, SRC, fp=sys.stderr):
    for i, (sent, result) in enumerate(zip(src, trg.y)):
        print(ctr + i, file=fp)
        print("INP:", SRC.str_rpr(sent), file=fp)
        print("TAG:", TRG.str_rpr(result), file=fp)
   
def onDecodingStart():
    UF.trace("Tagging started.")

def onBatchUpdate(ctr, src, trg):
    # Print this batch's predictions when --verbose is set.
    if args.verbose:
        print_result(ctr, trg, TRG, src, SRC, sys.stderr)
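
# Illustrative only: a hypothetical driver showing how the callbacks above fit
# together. `decode_batches` and the callable-model line are made-up stand-ins,
# not ParallelTextClassifier's actual API.
def decode_batches(batches):
    onDecodingStart()
    ctr = 0
    for src in batches:
        trg = model(src)  # hypothetical: whatever call produces the tagged batch
        onBatchUpdate(ctr, src, trg)
        ctr += len(src)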