# Example n. 1
# 0
def entailment_worker(premise, hypothesis, manager_dict):
    '''
    Compute the alignment score and entailment code for a premise/hypothesis
    pair and store them in the shared process manager_dict, keyed by the
    premise string.

    Args:
        premise: the premise sentence (a string); also used as the result key.
        hypothesis: the hypothesis sentence (a string).
        manager_dict: a multiprocessing Manager dict shared across workers.
    '''
    aligner = al.Aligner()
    p_tokens = word_tokenize(premise)
    h_tokens = word_tokenize(hypothesis)
    # 'default' selects the aligner's standard scoring configuration.
    alignments, score = aligner.align(p_tokens, h_tokens, 'default')

    # The marking/classification/sequencing/projection steps are all
    # encapsulated in pipeline.get_entailment; sequenced_edits is not
    # needed for the result dict.
    sequenced_edits, entailment_code = pipeline.get_entailment(
        p_tokens, h_tokens, alignments)
    manager_dict[premise] = {
        'premise': premise,
        'score': score,
        'entailment_code': entailment_code
        }
# Example n. 2
# 0
def alignment_worker(premise, p_tokens, h_tokens, manager_dict):
    '''
    Align a pre-tokenized premise against a pre-tokenized hypothesis and
    record the alignments, the similarity score and the premise tokens
    in the shared manager_dict, keyed by the premise string.
    '''
    result = {'p_tokens': p_tokens}
    result['alignments'], result['score'] = al.Aligner().align(
        p_tokens, h_tokens, 'default')
    manager_dict[premise] = result
 def slow(self, premises, hypothesis):
     aligner = al.Aligner()
     h_tokens = word_tokenize(hypothesis)
     all_start = time()
     codes = []
     for premise in premises:
         start_time = time()
         p_tokens = word_tokenize(premise)
         alignments, score = aligner.align(p_tokens, h_tokens, 'default')
         sequenced_edits, entailment_code = pipeline.get_entailment(
             p_tokens, h_tokens, alignments)
         codes.append(entailment_code)
         print 'one:', time() - start_time
     #print '\nCompleted serial processing'
     #for i in codes:
         #print i
     print 'Completed all, serially in', time() - all_start
# Example n. 4
# 0
def process_image(image):
    align = aligner.Aligner()
    align.add_face_no_throttle(globals()["align_to"])

    episode = image.split("/")[-1].replace(".jpg", "").split("_")[1]
    if episode == globals()["last_episode"]:
        os.remove(image)
        return False

    try:
        align.add_face(image)
    except:
        print "removing " + image
        os.remove(image)
        return False

    align.align(image)
    globals()["last_episode"] = episode
    return True
# Example n. 5
# 0
def run_experiments_with_aligner(roots1, roots2, initial_only=False):
    """Runs FLAGS.number_of_experiments experiments, using new aligner.

    Note we assume that the input and output can be split on space!

    Args:
      roots1: A Roots class instance
      roots2: A Roots class instance
      initial_only: If True, passed through to compute_alignments to
        restrict alignment to initial segments — TODO confirm semantics
        against aligner.Aligner.compute_alignments.
    """
    for i in range(FLAGS.number_of_experiments):
        # Re-pair the etyma on every run so each experiment draws a
        # fresh sample.
        zipped = [(c1.split(), c2.split())
                  for (c1, c2) in produce_paired_etyma(roots1, roots2)]
        # A fresh aligner per run keeps experiments independent.
        the_aligner = aligner.Aligner()
        success = the_aligner.compute_alignments(
            zipped,
            max_zeroes=FLAGS.max_zeroes,
            max_allowed_mappings=FLAGS.max_allowed_mappings,
            print_mappings=FLAGS.print_mappings,
            initial_only=initial_only)
        print("RUN:\t{}\t{}".format(i, success))
        sys.stdout.flush()
# Example n. 6
# 0
            print predicted_features
            weights = weights + (learning_rate *
            (gold_features - predicted_features))
            #logging.warning('Summed rated weights:\n%s' % weights)

        weights = weights / sqrt(sum([i ** 2 for i in weights]))
        #logging.warning('L2 normalization:\n%s' % weights)
        weights_history.append(weights)
        #logging.warning('\n\nWeights history:\n%s' % weights_history)

    weights_averaged = 1 / (learning_epochs
    - burn_in_epochs) * sum(weights_history[burn_in_epochs:])
    return weights_averaged


if __name__ == '__main__':
    aligner = aligner.Aligner()
    pickle_file = os.path.join(os.path.dirname(__file__),
    '../training_data/alignment_problems.p')
    training_set = open(pickle_file)
    training_data = pickle.load(training_set)
    training_set.close()
    t0 = time.clock()
    averaged_weights = learn_weights(training_data, 50, 10, 1, 0.8)
    t = time.clock() - t0
    print 'Trained in %s seconds' % t
    print '\n\n\nAveraged weights:\n', averaged_weights
    weights_file = open('../training_data/weights.p', 'w+b')
    pickle.dump(averaged_weights, weights_file)
    weights_file.close()
# Example n. 7
# 0
# Import the crossbar PCB geometry from the electronics project's STEP export.
this_stepfile_name = pcb_project + ".step"
this_stepfile = Path(__file__).parent.parent.parent / "electronics" / pcb_project / "3dOut" / this_stepfile_name
crossbar = tb.u.import_step(this_stepfile)
crossbar_pcb_top_height = 19.5  # from crossbar PCB design

# get the adapter PCB step
adapter = chamber.adapter
adapter_width = chamber.adapter_width

# build an alignment endblock
# (endblock variant with horizontal M3 holes, alignment bumps and a
# 1.6 mm special chamfer; sized to the adapter PCB width)
ablock = tb.endblock.build(adapter_width=adapter_width, horzm3s=True, align_bumps=True, special_chamfer=1.6)
ablock = to_holder(ablock, chamber_floor)
# center the block vertically on its own height
ablock = ablock.translate((0, tb.endblock.height / 2, 0))

# build the aligner
ac = aligner.Aligner(tb)
al = ac.build()
al = to_holder(al, chamber_floor)
# seat the aligner on top of the endblock, then face it inward;
# translate-then-rotate order matters here
al = al.translate((0, tb.endblock.height, 0))
al = al.rotate((0, 0, 0), (0, 1, 0), -90)
ablock.add(al)  # put them on the same workplane

# build an endblock
# (same variant as the alignment endblock, but without the aligner)
block = tb.endblock.build(adapter_width=adapter_width, horzm3s=True, align_bumps=True, special_chamfer=1.6)
block = to_holder(block, chamber_floor)
block = block.translate((0, tb.endblock.height / 2, 0))

gas_plate_thickness = 2  # thickness of the plate we'll use to redirect the gas
block_scrunch = 5.8  # move the blocks a further amount towards the center
blockA = block.translate(
    (