Example #1
    def __init__(self, x_start, y_start, x_goal, y_goal):
        self.path = []

        self.s_start = State(x_start, y_start, Pair(0, 0))
        self.s_goal = State(x_goal, y_goal, Pair(0, 0))

        self.clear_fields()
Example #2
def scheme_read(src):
    """Read the next expression from SRC, a Buffer of tokens.

    >>> scheme_read(Buffer(tokenize_lines(['nil'])))
    nil
    >>> scheme_read(Buffer(tokenize_lines(['1'])))
    1
    >>> scheme_read(Buffer(tokenize_lines(['true'])))
    True
    >>> scheme_read(Buffer(tokenize_lines(['(+ 1 2)'])))
    Pair('+', Pair(1, Pair(2, nil)))
    """
    if src.current is None:
        raise EOFError
    val = src.pop_first()  # Get and remove the first token
    if val == 'nil':
        # BEGIN PROBLEM 2
        "*** YOUR CODE HERE ***"
        return nil
        # END PROBLEM 2
    elif val == '(':
        # BEGIN PROBLEM 2
        "*** YOUR CODE HERE ***"
        return read_tail(src)
        # END PROBLEM 2
    elif val == "'":
        # BEGIN PROBLEM 3
        "*** YOUR CODE HERE ***"
        return Pair('quote', Pair(scheme_read(src), nil))
        # END PROBLEM 3
    elif val not in DELIMITERS:
        return val
    else:
        raise SyntaxError('unexpected token: {0}'.format(val))
Example #3
    def move(self):
        """Move self to a new position in its world, by adding the x and y
        coordinates of its velocity vector to the x, y coordinates of its
        current position."""

        width = self._world.get_width()
        height = self._world.get_height()

        new_x = self._position[0] + self._velocity[0]
        new_x = min(max(0, new_x), width - 1)
        new_y = self._position[1] + self._velocity[1]
        new_y = min(max(0, new_y), height - 1)

        if self._world[new_x, new_y] is None:
            self._world[new_x, new_y] = self
            del self._world[self._position.get()]
            self._position = Pair(new_x, new_y)
            self._turtle.goto(new_x, new_y)

        # If the boid is approaching a boundary of world, have it turn
        #   in the next step:
        if (self._velocity[0] < 0 and new_x < 5) or \
           (self._velocity[0] > 0 and new_x > width - 5) or \
           (self._velocity[1] < 0 and new_y < 5) or \
           (self._velocity[1] > 0 and new_y > height - 5):
            self._velocity.turn(
                TURN_ANGLE)  # Turn angle is a constant in the sim
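The update above keeps the boid on the grid by clamping each coordinate into [0, width - 1] and [0, height - 1]. A minimal standalone sketch of that clamping step (the clamp helper is ours, not part of the example):

def clamp(value, low, high):
    # Constrain value to the inclusive range [low, high].
    return min(max(low, value), high)

# clamp(-3, 0, 9) -> 0    clamp(12, 0, 9) -> 9    clamp(5, 0, 9) -> 5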
Example #4
    def get_candidates(self, word, gold=False, top=False):
        candidates = set()
        if self.supervised:
            if word in self.gold_parents:
                candidates.add(self.gold_parents[word])
            if gold:
                return candidates

        if word in self.candidates_cache: return self.candidates_cache[word]

        candidates.add((word, 'STOP'))
        if len(word) < 3: return candidates
        for pos in xrange(1, len(word)):
            parent = word[:pos]
            if self.compounding and parent in self.word_cnt and word[pos:] in self.word_cnt:
                if self.word_cnt[parent] >= self.freq_thresh and self.word_cnt[word[pos:]] >= self.freq_thresh:
                    candidates.add(((parent, word[pos:]), 'COM_LEFT'))
                    candidates.add(((parent, word[pos:]), 'COM_RIGHT'))
            if 2 * len(parent) >= len(word):
                pair = Pair(word, parent, 'SUFFIX')
                suf, _ = pair.get_affix_and_transformation()
                if not self.pruner or suf not in self.pruner['suf']:
                    candidates.add((parent, 'SUFFIX'))
                if self.transform:
                    if pos < len(word) - 1 and word[pos - 1] == word[pos]:
                        pair = Pair(word, parent, 'REPEAT')
                        suf, trans = pair.get_affix_and_transformation()
                        if not self.pruner or suf not in self.pruner['suf']:
                            if not self.pruner or trans not in self.pruner['REPEAT']:
                                candidates.add((parent, 'REPEAT'))
                    if parent[-1] in self.alphabet:
                        for char in self.alphabet:
                            if char == parent[-1]: continue
                            new_parent = parent[:-1] + char
                            if self.get_similarity(new_parent, word) > 0.2:
                                pair = Pair(word, new_parent, 'MODIFY')
                                suf, trans = pair.get_affix_and_transformation()
                                if not self.pruner or suf not in self.pruner['suf']:
                                    if not self.pruner or trans not in self.pruner['MODIFY']:
                                        candidates.add((new_parent, 'MODIFY'))
                    if pos < len(word) - 1 and word[pos:] in self.suffixes:
                        for char in self.alphabet:
                            new_parent = parent + char
                            if word == new_parent: continue
                            if new_parent in self.word_cnt:
                                pair = Pair(word, new_parent, 'DELETE')
                                suf, trans = pair.get_affix_and_transformation()
                                if not self.pruner or suf not in self.pruner['suf']:
                                    if not self.pruner or trans not in self.pruner['DELETE']:
                                        candidates.add((new_parent, 'DELETE'))
            parent = word[pos:]
            if len(parent) * 2 >= len(word):
                pair = Pair(word, parent, 'PREFIX')
                pre, _ = pair.get_affix_and_transformation()
                if not self.pruner or pre not in self.pruner['pre']:
                    candidates.add((parent, 'PREFIX'))
        self.candidates_cache[word] = candidates
        return candidates
Example #5
 def _check_targetrc(self, pair):
     rcpair = Pair()
     rcpair.set_from_data(pair.identifier,
                          reverse_complement(pair.r1.original_seq),
                          reverse_complement(pair.r2.original_seq))
     if self._run.cotrans:
         rcpair.r2.linker_start = pair.r2.linker_start
     self._find_matches(rcpair)
     if rcpair.matched or (self._run.cotrans
                           and self._cotrans_find_short_matches(rcpair)):
         self.counters.dna_residual_pairs += 1
Example #6
 def list(args, env):
   p = None
   pt = None
   while len(args) > 0:
     o = Native.ARG(args)
      if p is None:
       p = Pair(o, None)
       pt = p
     else:
       pt.cdr = Pair(o, None)
       pt = pt.cdr
   
   return p
Example #7
    def get_predecessors(self, u):
        s = []

        tmp = State(u.x + 1, u.y, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x + 1, u.y + 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x, u.y + 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x - 1, u.y + 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x - 1, u.y, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x - 1, u.y - 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x, u.y - 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)
        tmp = State(u.x + 1, u.y - 1, Pair(-1, -1))
        if not self.occupied(tmp):
            s.append(tmp)

        return s
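The eight near-identical blocks above enumerate the 8-connected neighbours of u. An equivalent, more compact sketch using an offset table, assuming the same State, Pair and occupied helpers:

    def get_predecessors(self, u):
        # 8-connected neighbourhood expressed as coordinate offsets
        offsets = [(1, 0), (1, 1), (0, 1), (-1, 1),
                   (-1, 0), (-1, -1), (0, -1), (1, -1)]
        s = []
        for dx, dy in offsets:
            tmp = State(u.x + dx, u.y + dy, Pair(-1, -1))
            if not self.occupied(tmp):
                s.append(tmp)
        return s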
Example #8
 def match_feature(self):
     """
     Brute force feature Matching
     :return: None
     """
     number_of_image = len(self.frame_set)
     for i in range(number_of_image):
         for j in range(i+1, number_of_image):
             temp_pair = Pair(self.frame_set[i], self.frame_set[j])
             feature_matcher = cv2.BFMatcher(cv2.NORM_L2SQR, True)
             all_matches = feature_matcher.match(
                 self.frame_set[i].feature_descriptor,
                 self.frame_set[j].feature_descriptor)
             temp_pair.matches = all_matches
             self.pair_set.append(temp_pair)
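The positional True passed to cv2.BFMatcher above is OpenCV's crossCheck flag, which keeps only mutual best matches. The same call with keyword arguments, where desc_a and desc_b stand in for the two frames' feature_descriptor arrays:

import cv2

matcher = cv2.BFMatcher(cv2.NORM_L2SQR, crossCheck=True)  # mutual-best matching only
# matches = matcher.match(desc_a, desc_b)  # returns a list of cv2.DMatch objects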
Example #9
def preprocess(xml_file='',
               senna_path='/media/raid-vapnik/tools/senna/',
               tt_path='/home/tools/treetagger/'):
    entailment_pairs = []
    tree = ET.parse(xml_file)
    pairs = tree.getroot()
    (filepath, filename) = os.path.split(xml_file)
    (name, extension) = os.path.splitext(filename)
    for pair in pairs:
        attrib = pair.attrib
        id = attrib['id']
        value = attrib['entailment']
        task = attrib['task']
        texts = pairs.findall("./pair[@id='%s']/t" % id)
        hypos = pairs.findall("./pair[@id='%s']/h" % id)
        text = texts[0].text
        hypo = hypos[0].text
        features_text = run_senna(text, id, name, senna_path, tt_path)
        features_hypo = run_senna(hypo, id, name, senna_path, tt_path)
        pair = Pair(id, text, hypo, value, task, features_text, features_hypo)
        entailment_pairs.append(pair)
    pickle_name = '%s.pickle' % name
    with open(pickle_name, 'w') as f:
        pickle.dump(entailment_pairs, f)
    return
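Note that the pickle file above is opened in text mode ('w'), which works under Python 2 but fails under Python 3, where pickle needs a binary file. A sketch of the Python 3 form of that final step:

# with open(pickle_name, 'wb') as f:
#     pickle.dump(entailment_pairs, f)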
Example #10
    def measure(self, n=16, corrections=False):

        serial = input(_C.YEL +
                       'Insert 1st electrode with calibration ring and scan serial: ' +
                       _C.ENDC)
        electrodes = []
        times = []
        for i in range(2):
            if i == 1:
                input(_C.YEL + 'Insert 2nd electrode with calibration and press [Enter]')
            t0 = time.time()
            CoupledCapture(n=n, directory='combined', stp=self.stp, cam=self.cam)
            print(_C.CYAN + _C.BOLD + 'Evaluating electrode' + _C.ENDC)
            spiral, calibration = CombinedSequence(n=n, directory='hardware/combined', env=env)
            print(_C.CYAN + _C.BOLD + 'Measurement completed in ' + str(round(time.time() - t0, 2)) + 's' + _C.ENDC)

            localElectrode = Electrode(serial, spiral, calibration)
            electrodes.append(copy.copy(localElectrode))
            times.append(time.time() - t0)

        print(_C.CYAN + _C.BOLD + 'Pair completed in ' +
              str(round(sum(times), 2)) + 's' + _C.ENDC)

        pair = Pair(env=env, electrodes=tuple(electrodes), serial=serial, corrections=corrections)
        return pair
Example #11
def read_tail(src):
    """Return the remainder of a list in SRC, starting before an element or ).

    >>> read_tail(Buffer(tokenize_lines([')'])))
    nil
    >>> read_tail(Buffer(tokenize_lines(['2 3)'])))
    Pair(2, Pair(3, nil))
    """
    try:
        while src.end_of_line():
            src.pop_first()
        if src.current is None:
            raise SyntaxError('unexpected end of file')
        elif src.current == ')':
            # BEGIN PROBLEM 2
            "*** YOUR CODE HERE ***"
            src.pop_first()
            return nil
            # END PROBLEM 2
        else:
            # BEGIN PROBLEM 2
            "*** YOUR CODE HERE ***"
            first = scheme_read(src)
            second = read_tail(src)
            pair = Pair(first, second)
            return pair
            # END PROBLEM 2
    except EOFError:
        raise SyntaxError('unexpected end of file')
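scheme_read and read_tail above are mutually recursive: scheme_read consumes '(' and delegates to read_tail, which calls scheme_read for each element until it reaches ')'. A short illustrative call, assuming the same Buffer, tokenize_lines, Pair and nil helpers used in the doctests:

# scheme_read(Buffer(tokenize_lines(['(1 (2 3))'])))
# -> Pair(1, Pair(Pair(2, Pair(3, nil)), nil))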
Example #12
def preprocess(parc_file='',
               score_file='',
               task='sts',
               senna_path='/media/raid-vapnik/tools/senna/',
               tt_path='/home/tools/treetagger/'):
    entailment_pairs = []
    scores = load_scores(score_file)
    (filepath, filename) = os.path.split(parc_file)
    (name, extension) = os.path.splitext(filename)
    with open(parc_file, 'r') as parc_f:
        id = 0
        for line in parc_f:
            if not line.strip():
                continue
            line = line.rstrip('\n')
            (text, hypo) = line.split('|||')
            features_text = run_senna(text, id, name, senna_path, tt_path)
            features_hypo = run_senna(hypo, id, name, senna_path, tt_path)
            value = scores.popleft()
            id += 1
            pair = Pair(id, text, hypo, value, task, features_text,
                        features_hypo)
            entailment_pairs.append(pair)
        (filepath, filename) = os.path.split(parc_file)
        (pickle_name, extension) = os.path.splitext(filename)
        pickle_name = '%s.pickle' % pickle_name
        with open(pickle_name, 'w') as f:
            pickle.dump(entailment_pairs, f)
    return
Example #13
    def sexp(tokens):
        o = tokens.pop(0)

        if len(tokens) == 0:
            return None
        elif isinstance(o, str) and o == "\"":  #quote
            nx = tokens.pop(0)
            ss = ""  # start empty; seeding with nx would duplicate the first token
            while len(tokens) > 0 and nx != "\"":
                ss += nx
                nx = tokens.pop(0)

            if nx != "\"":
                raise ValueError("Unterminated string constant")

            return ss
        elif isinstance(o, str) and o == "(":
            return Parse.next_list(tokens)
        elif isinstance(o, str) and o == "'":  # almost a quote (quote shorthand)
            return Pair("quote", Parse.sexp(tokens))
        else:
            if o.isnumeric() or Parse.is_valid_decimal(o):
                if ("." in o):
                    return float(o)
                else:
                    return int(o)

            return o
Example #14
    def next_list(tokens):
        cur = Parse.sexp(tokens)

        p = None
        pt = None
        while len(tokens) > 0 and cur is not None and cur != ")":
            if p is None:
                p = Pair(cur, None)
                pt = p
            else:
                pt.cdr = Pair(cur, None)
                pt = pt.cdr

            cur = Parse.sexp(tokens)

        return p
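A short trace of the two parser helpers above on an illustrative token list (the list assumes '(' has already been consumed, and that is_valid_decimal rejects bare symbols):

# Parse.next_list(['+', '1', '2', ')']) proceeds as:
#   sexp -> '+'    (plain symbol)
#   sexp -> 1      (integer, via isnumeric)
#   sexp -> 2
#   sexp -> None   (')' is popped and the token list is then empty)
# and returns Pair('+', Pair(1, Pair(2, None)))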
Example #15
    def update_cell(self, x, y, val):
        u = State(x, y, Pair(0, 0))

        if u == self.s_start or u == self.s_goal:
            return

        self.make_new_cell(u)
        self.cell_hash[u].cost = val
        self.update_vertex(u)
Example #16
 def match_feature(self):
     """
     Brute force feature Matching
     :return: None
     """
     number_of_image = len(self.frame_set)
     self._read_projection_matrix()
     for i in range(number_of_image):
         for j in range(i + 1, number_of_image):
             temp_pair = Pair(self.frame_set[i], self.frame_set[j])
             temp_pair.projection_matrix_1 = self.projection_matrix[i]
             temp_pair.projection_matrix_2 = self.projection_matrix[j]
             feature_matcher = cv2.BFMatcher(cv2.NORM_L1, True)
             all_matches = feature_matcher.match(
                 self.frame_set[i].feature_descriptor,
                 self.frame_set[j].feature_descriptor)
             temp_pair.matches = all_matches
             self.pair_set.append(temp_pair)
Example #17
def map_explosions(pair: Pair, explosions: list = None) -> None:
    # Use a fresh list per call; a mutable default list would be shared across calls.
    if explosions is None:
        explosions = []
    if pair is None:
        return
    if pair.get_depth() >= 4 and pair.value is None:
        explosions.append(pair)
    else:
        map_explosions(pair.left, explosions)
        map_explosions(pair.right, explosions)
Example #18
    def Import(self, file):
        f = open(file, 'r')
        status = 'outside'
        current = None
        stored = []
        linec = 0
        try:
            while True:
                line = f.readline().strip().replace('\n', '')
                linec = linec + 1

                if line.startswith('<chat>'):
                    status = self.check(status, 'outside', 'chat', linec)
                    line = line[6:]
                if line.startswith('</chat>'):
                    status = self.check(status, 'chat', 'outside', linec)
                    break
                if line.startswith('<message>'):
                    status = self.check(status, 'chat', 'message', linec)
                    current = bufferedMsg()
                if line.startswith('</message>'):
                    status = self.check(status, 'message', 'chat', linec)
                    stored.append(current)
                    current = None
                if line.startswith('<me>'):
                    status = self.check(status, 'message', None, linec)
                    current.isMe = line[4:5] == '1'
                if line.startswith('<media>'):
                    status = self.check(status, 'message', None, linec)
                    current.hasMedia = True
                if line.startswith('<msg>'):
                    status = self.check(status, 'message', None, linec)
                    current.text = line[5:-6]
                if line.startswith('<date>'):
                    status = self.check(status, 'message', None, linec)
                    # ignore date for now
                    continue
            f.close()
            if status != 'outside' or current:
                print('this file is malformed, try to continue. ', status)
            print('\n', len(stored), 'Chat messages read. Processing...')

            stored.reverse()

            saved = []  # list of all pairs to return
            for i in range(1, len(stored) - 1):  # index 0 does not need to be examined
                #  print(stored[i])
                if stored[i].isMe and not stored[i].hasMedia:
                    if not stored[i - 1].isMe and not stored[i - 1].hasMedia:
                        saved = addPair(
                            Pair(stored[i - 1].text, stored[i].text), saved)
            return saved
        except Exception as e:
            print('unexpected Error', e, status)
        return []
Example #19
 def match_feature(self):
     """
     Brute force feature Matching
     :return: None
     """
     number_of_image = len(self.frame_set)
     self._read_projection_matrix()
     for i in range(number_of_image):
         for j in range(i+1, number_of_image):
             temp_pair = Pair(self.frame_set[i], self.frame_set[j])
             temp_pair.projection_matrix_1 = self.projection_matrix[i]
             temp_pair.projection_matrix_2 = self.projection_matrix[j]
             feature_matcher = cv2.BFMatcher(cv2.NORM_L1, True)
             all_matches = feature_matcher.match(
                 self.frame_set[i].feature_descriptor,
                 self.frame_set[j].feature_descriptor)
             temp_pair.matches = all_matches
             self.pair_set.append(temp_pair)
Example #20
def associateDescriptionsWithImages(descriptionsFolder, imagesFolder):
    pairs = []
    descriptionsFolderContent = descriptionsFolder.getListOfFiles()
    for descriptionFile in descriptionsFolderContent:
        imageFile = findCorrespondingImage(descriptionFile, imagesFolder)
        if (imageFile is not None):
            pair = Pair(descriptionFile, imageFile)
            pairs.append(pair)

    return pairs
Example #21
 def _segment_iter(self, node):
     type_ = self.types[node]
     parent = self.parents[node]
     pair = Pair(node, parent, type_)
     if type_ == 'STOP': return Segment(node, node, 'stem')
     if type_ in ['COM_LEFT', 'COM_RIGHT', 'HYPHEN']:
         p1, p2 = self.parents[node]
         return self._segment_iter(p1).splice(self._segment_iter(p2))
     else:
         return self._segment_iter(parent).extend(pair)
Example #22
def create_random_pairs_help(participants: list):
    not_yet_receiver = participants.copy()
    pairs = []
    for imp in participants:
        receiver = not_yet_receiver[randint(0, len(not_yet_receiver) - 1)]
        try:
            pairs.append(Pair(imp, receiver))
        except ValueError:
            return None
        not_yet_receiver.remove(receiver)
    return pairs
Example #23
 def __init__(self, env, ord):
     # create a random pair, which will be the starting point of the annealing
     self.pair = Pair()
     # initialize random environment variables
     self.environment = env
     # initialize random delivery-order variables
     self.order = ord
     self.tempo = 0
     self.iteracoes = 100000
     # initialize the graph with the plot of the risk curve
     self.create_plt()
Example #24
    def __init__(self, myworld):
        """Construct a boid at a random position in the given world.

        Args:
            myworld (World object): a World object.
        """

        self._world = myworld
        (x, y) = (random.randrange(self._world.get_width()),
                  random.randrange(self._world.get_height()))
        while self._world[x, y] is not None:
            (x, y) = (random.randrange(self._world.get_width()),
                      random.randrange(self._world.get_height()))
        self._position = Pair(x, y)
        self._world[x, y] = self
        self._velocity = Vector(
            (random.uniform(-1, 1), random.uniform(-1, 1))).unit()
        self._turtle = turtle.Turtle()
        self._turtle.speed(0)
        self._turtle.up()
        self._turtle.setheading(self._velocity.angle())
Example #25
def create_pairs(g, r):
    givers = g[:]
    receivers = r[:]
    pairs = []
    for giver in givers:
        try:
            receiver = choose_receiver(giver, receivers)
            receivers.remove(receiver)
            pairs.append(Pair(giver, receiver))
        except Exception:  # if a valid pairing cannot be completed, start over
            return create_pairs(g, r)
    return pairs
Example #26
    def computePaires(self, instances):
        maxDelta = 0
        result = Pair()
        paires = []

        for i in range(self.genNumber):
            if i == self.classIndex:
                continue  # skip the class attribute itself
            for j in range(i + 1, self.genNumber):
                if j == self.classIndex:
                    continue  # skip the class attribute itself
                result = computeSingleDelta(instances, i, j)
                if maxDelta < result.getDelta():
                    # new best delta: keep only the pairs that reach it
                    maxDelta = result.getDelta()
                    paires.clear()
                    paires.append(result)
                elif maxDelta == result.getDelta():
                    paires.append(result)
        return paires
Example #27
    def _worker(self, worker_id):
        try:
            processor = self._processor
            processor.reset_counts()
            if self._pair_db:
                self._pair_db.worker_id = worker_id
            writeback = bool(self._result_set_id)
            tagged = processor.uses_tags
            use_quality = self._run._parse_quality
            pair = Pair()
            while True:
                pairs = self._pairs_to_do.get()
                if not pairs:
                    break
                results = []
                for lines in pairs:
                    pair.set_from_data(lines[3], str(lines[1]), str(lines[2]), lines[0])
                    if use_quality:
                        pair.r1.quality = str(lines[4])
                        pair.r2.quality = str(lines[5])
                    if self.force_mask:
                        pair.set_mask(self.force_mask)
                    processor.process_pair(pair)
                    #if pair.failure:
                    #    print('FAIL: {}'.format(pair.failure))
                    if writeback:
                        results.append(self._make_result(lines[3], pair, tagged))

                if writeback:
                    self._results.put(results)

                if not self._run.quiet:
                    sys.stdout.write('.')#str(worker_id))
                    sys.stdout.flush()

            self._pairs_done.put(processor.counters.count_data())
        except:
            print("**** Worker exception, aborting...")
            raise
Example #28
    def Import(self, file):
        saved = []
        f = open(file, 'r')
        i = 0
        while True:
            line = f.readline()
            i = i + 1
            if not line:
                break
            if '\0' in line:
                line = line.rstrip()  # remove trailing newline
                q, a = line.split('\0', 1)
                asplit = a.split('\0')
                pair = Pair(q, None)
                for awns in asplit:
                    pair.addawns(awns)

                saved.append(pair)
            else:
                print('weird line:', i)
        f.close()
        return saved
Example #29
def create_pairs(matrixes, parts):
    small_pair_lists = []
    for i in range(parts):
        small_pair_lists.append([])

    j = 0
    for i in range(len(matrixes)):
        if j == parts:
            j = 0
        small_pair_lists[j].append(Pair(matrixes[i]))
        j = j + 1

    return small_pair_lists
Example #30
def get_positive_pairs(folder, all_pairs):
    files1 = os.listdir(folder)
    shuffle(files1)
    last_len = len(all_pairs)
    files1 = files1[:83]
    for file1 in files1:
        pair_count = 0
        for file2 in files1[files1.index(file1) + 1:]:
            if file1 == file2:  # not necessary, actually
                continue
            pair_count += 1
            par = Pair(folder + '/' + file1, folder + '/' + file2, 1)  # Same class
            all_pairs.append(par)
Example #31
    def update_goal(self, x, y):
        to_add = []
        for state in self.cell_hash:
            if not self.close(self.cell_hash[state], self.STRAIGHT_DIST):
                to_add.append(
                    Pair(Point(state.x, state.y), self.cell_hash[state].cost))

        self.s_goal.x = x
        self.s_goal.y = y

        self.clear_fields()

        for p in to_add:
            self.update_cell(p.first().x, p.first().y, p.second())
Example #32
    def get_successors(self, u):
        s = []

        if self.occupied(u):
            return s

        s.append(State(u.x + 1, u.y, Pair(-1, -1)))
        s.append(State(u.x + 1, u.y + 1, Pair(-1, -1)))
        s.append(State(u.x, u.y + 1, Pair(-1, -1)))
        s.append(State(u.x - 1, u.y + 1, Pair(-1, -1)))
        s.append(State(u.x - 1, u.y, Pair(-1, -1)))
        s.append(State(u.x - 1, u.y - 1, Pair(-1, -1)))
        s.append(State(u.x, u.y - 1, Pair(-1, -1)))
        s.append(State(u.x + 1, u.y - 1, Pair(-1, -1)))

        return s
Example #33
    def run_simple(self, pair_iterator):

        quiet = self._run.quiet
        run_limit = self._run._run_limit
        more_pairs = True
        pair_db = self._pair_db
        writeback = bool(self._result_set_id)
        sam = bool(self._run.generate_sam)
        channel_reads = bool(self._run.generate_channel_reads)
        use_quality = self._run._parse_quality
        total = 0
        if writeback:
            result_set_id = self._result_set_id

        processor = self._processor
        if self._pair_db:
            self._pair_db.worker_id = 0
        tagged = processor.uses_tags
        pair = Pair()

        if sam:
            sam_writer = SamWriter(self._run.generate_sam, processor._targets.targets)
        if channel_reads:
            plus_writer = FastqWriter('R1_plus.fastq', 'R2_plus.fastq')
            minus_writer = FastqWriter('R1_minus.fastq', 'R2_minus.fastq')

        while more_pairs:
            try:
                while True:
                    pair_info = next(pair_iterator)
                    if not quiet:
                        sys.stdout.write('^')
                        sys.stdout.flush()
                    results = []
                    for lines in pair_info:
                        pair.set_from_data(lines[3], str(lines[1]), str(lines[2]), lines[0])
                        if use_quality:
                            pair.r1.quality = str(lines[4])
                            pair.r2.quality = str(lines[5])
                        if self.force_mask:
                            pair.set_mask(self.force_mask)

                        try:
                            processor.process_pair(pair)
                        except:
                            print("**** Error processing pair: {} / {}".format(pair.r1.original_seq, pair.r2.original_seq))
                            raise

                        if sam:
                            sam_writer.write(pair)
                        if channel_reads and pair.has_site:
                            if pair.mask.chars == self._run.masks[0]:
                                plus_writer.write(pair)
                            else:
                                minus_writer.write(pair)

                        total += pair.multiplicity
                        if writeback:
                            results.append(self._make_result(lines[3], pair, tagged))

                    if not quiet:
                        sys.stdout.write('v')
                        sys.stdout.flush()

                    if results:
                        pair_db.add_results(self._result_set_id, results)
                        if not quiet:
                            sys.stdout.write('.')
                            sys.stdout.flush()

                    if run_limit and total > run_limit:
                        raise StopIteration()

            except StopIteration:
                more_pairs = False

        if not self._run.quiet:
            print("\nAggregating data...")

        processor.counters.total_pairs = total
        if self._pair_db:
            processor.counters.unique_pairs = self._pair_db.unique_pairs()
Example #34
    def generate_interactions_o(self,interaction_name,bond="harmonic",SPB=False,radius=10,cutoff=1.15,reducedv_factor=1,khun=1.):

        if bond == "harmonic" :
            temp1 = "bond_style      harmonic\n"
            temp2 = "bond_coeff %i %.2f %.2f"
            ene = 350
        if bond == "fene":
            if SPB:
                temp1 = "bond_style hybrid harmonic fene\n"
                temp2 = "bond_coeff %i fene %.2f %.2f %.2f %.2f"
            else:
                temp1 = "bond_style fene\n"
            
                temp2 = "bond_coeff %i %.2f %.2f %.2f %.2f"
            ene = 30 * 1
        Bond = [temp1]
        if bond == "fene":
            Bond.append("special_bonds fene\n")
        ene_ratio=1#.35
        #Bond.append("special_bonds 0.0 1.0 1.0\n")
        if SPB and False:
            #print radius
            Pair = ["pair_style  hybrid lj/cut 3.0 gauss/cut  %.2f \n"%(2*radius) + "pair_modify shift yes\n"]
        else:
            Pair = ["pair_style   lj/cut 1.4 \n" + "pair_modify shift yes\n"]
        Angle = ["angle_style cosine/delta\n"] 
        Angle = ["angle_style harmonic\n"]
        keyl = range(1,len(self.natom)+1)
        for t1 in keyl:
            for t2 in keyl:
                if t2 >= t1:
                    if not self.liaison.has_key("%s-%s"%(t1,t2)):
                        print "Warning: liaison between {0} and {1} not defined".format(t1,t2)
                    dist,tybe_b = self.liaison["%s-%s"%(t1,t2)]
                    if  cutoff is not None:                   
                        cut = dist*cutoff
                    else:
                        cut = dist*pow(2.,1/6.) 
                    odist = copy.deepcopy(dist)
                    #dist=1
                    if bond == "fene":
                        Bond.append(temp2 % (tybe_b,ene_ratio * ene/(dist*dist),1.5*dist,ene_ratio,dist) +"\n")
                    else:
                        Bond.append(temp2 % (tybe_b, ene,dist) +"\n")
                    dist = odist
                    precise =""
                    if SPB and False:
                        precise = "lj/cut" 
                    reduced = 1
                    if t1 == t2 and t1 == 3:
                        reduced = 1
                    else:
                        reduced = reducedv_factor 
                        
            
                    Pair.append("""pair_coeff	 %s %s %s %.1f %.2f  %.2f\n"""%(t1,t2,precise,ene_ratio,dist*reduced,cut*reduced))
                    if self.angle_def is not None and self.Angle != []:
                        for t3 in keyl:
                            if t3 >= t2:
                                dist,tybe_b = self.angle_def["%s-%s-%s"%(t1,t2,t3)]
                                k=khun/2. * dist # dist = 1 if no ribo involved else 0
                                Angle.append("angle_coeff %i %.3f 180.0\n"%(tybe_b,k))
        if SPB:
            if bond == "fene":
                Bond.append("bond_coeff %i harmonic 0 0 \n"%(self.liaison["spb"][1]))
                spbond = "bond_coeff %i harmonic %.1f %.1f\n"%(self.liaison["spb"][1],10,microtubule/realsigma)
            else:
                Bond.append("bond_coeff %i  0 0 \n"%(self.liaison["spb"][1]))
                spbond = "bond_coeff %i %.1f %.1f\n"%(self.liaison["spb"][1],10,microtubule/realsigma)
            n_i = len(diameter)/2
            for t1 in range(len(diameter) ):
                Pair.append("""pair_coeff	 %i %i %s 0. %.2f  %.2f\n"""%(t1+1,num_particle["spb"],precise,dist,cut))
              
            Pair.append("""pair_coeff	 %i %i %s 0. %.2f  %.2f\n"""%(num_particle["spb"],num_particle["spb"],precise,dist,cut))
        
        g = open(interaction_name,"w")
        g.write("".join(Bond)+"\n")
        g.write("".join(Pair)+"\n")
        if self.Angle != []:
            g.write("".join(Angle))
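The strings assembled above are LAMMPS force-field commands. As a rough illustration with the harmonic settings (ene = 350) and a bond distance of 1.0, the temp2 template expands to a line of the form:

# 'bond_coeff %i %.2f %.2f' % (1, 350, 1.0)  ->  'bond_coeff 1 350.00 1.00'
# (bond type, force constant and equilibrium distance; values are illustrative)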