Example #1
    def encode(self, input_value, encode_range=None):

        if encode_range is None:
            encode_range = self.valid_range
        if encode_range is None:
            raise ValueError("Encoding scalar values requires valid range (valid_range or encode_range parameter)")

        if not isinstance(input_value, np.ndarray) and input_value in HRR.mapping:
            return HRR.mapping[input_value]
        else:

            result = np.empty(self.size, dtype=float)

            if isinstance(input_value, float) or isinstance(input_value, numbers.Integral):
                result = self.permute(helpers.normalize(self.scalar_encoder(input_value, self.size, encode_range[0])))
                if self.visualize:
                    print("Encoded ", input_value)
                    self.plot(result)
            elif isinstance(input_value, (frozenset, list, np.ndarray, set, tuple)):
                result = self.permute(helpers.normalize(self.coordinate_encoder(input_value, encode_range)))
                if self.visualize:
                    print("Encoded ", input_value)
                    self.plot(result)
            else:
                result = VSA.encode(self, input_value)
            return result
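
The scalar branch above delegates to a project-specific scalar_encoder before normalizing and permuting. As a rough, self-contained illustration of the underlying idea only (encode a scalar as an L2-normalized activity bump over a discretized value range; the names and parameters below are hypothetical, not the HRR class API):

import numpy as np

def encode_scalar(value, size=64, valid_range=(0.0, 10.0), width=2.0):
    # Hypothetical sketch: place a Gaussian bump at the position of `value`
    # inside `valid_range`, then L2-normalize the resulting vector.
    lo, hi = valid_range
    pos = (value - lo) / (hi - lo) * (size - 1)      # map the value to an index
    idx = np.arange(size, dtype=float)
    bump = np.exp(-0.5 * ((idx - pos) / width) ** 2)
    return bump / np.linalg.norm(bump)               # what helpers.normalize presumably does

vec = encode_scalar(3.7)    # unit-length vector representing the scalar 3.7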
Example #2
    def get_alternative_domains(self):
        """
        This function is used to color the faces in a checkerboard fashion.
        We put the domains transformed by words of even and odd length
        into two different lists. A face may be a regular m-polygon
        (which has type 0) or a uniform 2m-polygon (which has type 1),
        where 2pi/m is the angle between the two mirrors.
        """
        domain1 = []
        domain2 = []
        for i, p in enumerate(self.coords):
            # the two adjacent vertices and the midpoints between p and each of them
            q1 = self.coords[(i + 1) % len(self.coords)]
            q2 = self.coords[i - 1]
            m1 = helpers.normalize((p + q1) / 2)
            m2 = helpers.normalize((p + q2) / 2)

            if self.type:
                if (len(self.word) + i) % 2 == 0:
                    domain1.append((m1, p, m2, self.center))
                else:
                    domain2.append((m1, p, m2, self.center))

            else:
                if len(self.word) % 2 == 0:
                    domain1.append((m1, p, self.center))
                    domain2.append((m2, p, self.center))
                else:
                    domain1.append((m2, p, self.center))
                    domain2.append((m1, p, self.center))

        return domain1, domain2
Example #3
 def test_keep_suffix(self) -> None:
     """Tests that the * suffix is preserved."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_number = helpers.normalize(relation, "1*", "Budaörs út",
                                      normalizers)
     self.assertEqual([i.get_number() for i in house_number], ["1*"])
     house_number = helpers.normalize(relation, "2", "Budaörs út",
                                      normalizers)
     self.assertEqual([i.get_number() for i in house_number], ["2"])
Example #4
def fft_covariance_distance(covariances, matrix):
    neglected_arousals = ['R', 'W', 'RANDOM', 'N1', 'N2', 'N3']
    # the Fourier transform of the input matrix does not depend on the loop
    # variable, so compute its covariance once up front
    fm = make_fourier(matrix)
    fmc = np.cov(normalize(fm))
    differences = []
    for key in covariances:
        if key not in neglected_arousals:
            fc = np.cov(normalize(covariances[key]['FFT']))
            differences.append(np.linalg.norm(fmc - fc))
    return differences
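
For context, the distance used here is the Frobenius norm between two covariance matrices; a minimal standalone check of that metric (with made-up data, independent of the covariances dict above) might look like:

import numpy as np

rng = np.random.default_rng(0)
a = rng.normal(size=(6, 100))          # two fake multi-channel signals
b = rng.normal(size=(6, 100))

cov_a = np.cov(a)                      # 6x6 covariance matrices
cov_b = np.cov(b)
distance = np.linalg.norm(cov_a - cov_b)   # Frobenius norm of the difference
print(distance)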
Example #5
 def get_following_velocity(self, MIN, beacons, ENV):
     F_o = gof(self.__K_o, MIN, ENV)
     F_btf = MIN.get_vec_to_other(self.btf)
     F_btf_aug = self.MAX_FOLLOWING_SPEED * normalize(
         F_btf) if np.linalg.norm(
             F_btf) > self.MAX_FOLLOWING_SPEED else F_btf
     F = F_o + F_btf_aug
     """
     TODO: return a non-zero net force when the MIN that this deployed MIN is
     following is the target (to ensure that we travel further into the environment)
     """
     return self.MAX_FOLLOWING_SPEED * normalize(F)
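
The F_btf_aug expression is a speed-clamping pattern: keep the vector as-is when it is short enough, otherwise rescale it to the maximum speed. A generic numpy sketch of the same pattern (standalone names, not this project's API):

import numpy as np

def clamp_speed(v, max_speed):
    # Leave v unchanged if its norm is within max_speed,
    # otherwise rescale it to have norm exactly max_speed.
    norm = np.linalg.norm(v)
    return v * (max_speed / norm) if norm > max_speed else v

print(clamp_speed(np.array([3.0, 4.0]), max_speed=2.0))   # -> [1.2 1.6]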
Example #6
def prep_random_data():
    X, y = read_random_pattern_data()
    X = normalize(X, 0, 255)
    y = normalize(y, 0, 255)

    y = y > 127
    y = y * 1.0
    X /= 255.0

    print(X.shape)
    np.save('data/X_rp.npy', X)
    print(y.shape)
    np.save('data/y_rp.npy', y)
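
normalize(X, 0, 255) is a project helper; given that the code still compares y against 127 and divides X by 255 afterwards, it presumably clamps values into the given range rather than rescaling them. A hypothetical stand-in under that assumption:

import numpy as np

def normalize(arr, lo, hi):
    # Hypothetical stand-in: clamp values into [lo, hi].  The later
    # `y > 127` and `X /= 255.0` steps in prep_random_data() suggest the
    # helper keeps the original scale rather than mapping to [0, 1].
    return np.clip(np.asarray(arr, dtype=float), lo, hi)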
Example #7
    def train_one_step(self):
        """
        Execute one update for each of the networks. Note that if no positive advantage elements
        are returned the algorithm doesn't update the actor parameters.
        Args:
            None
        Returns:
            None
        """
        # transitions are sampled from the replay buffer
        transitions = self.replay.sample_batch(self.batch_size)
        state_batch = normalize(transitions.s, self.obs_rms)
        action_batch = transitions.a
        reward_batch = normalize(transitions.r, self.ret_rms)
        next_state_batch = normalize(transitions.sp, self.obs_rms)
        terminal_mask = transitions.it

        # train critic and value
        self.critics.train(state_batch, action_batch, reward_batch,
                           next_state_batch, terminal_mask, self.target_value,
                           self.gamma, self.q_normalization)
        self.value.train(state_batch, self.target_actor, self.target_critics,
                         self.action_samples)

        # note that transitions.s represents the sampled states from the memory buffer
        states, actions, advantages = self._sample_positive_advantage_actions(
            state_batch)
        if advantages.shape[0]:
            self.actor.train(states, actions, advantages, self.mode, self.beta)

        update(self.target_actor, self.actor, self.tau)
        update(self.target_critics, self.critics, self.tau)
        update(self.target_value, self.value, self.tau)

        with self.actor.train_summary_writer.as_default():
            tf.summary.scalar('actor loss',
                              self.actor.train_loss.result(),
                              step=self.step)

        with self.critics.train_summary_writer.as_default():
            tf.summary.scalar('critic loss',
                              self.critics.train_loss.result(),
                              step=self.step)

        with self.value.train_summary_writer.as_default():
            tf.summary.scalar('value loss',
                              self.value.train_loss.result(),
                              step=self.step)

        self.step += 1
Example #8
 def calc_bagofwords(self, centroids):
     """Calculate bag of words using the features
     added to an object."""
     for feature in self.features:
         try:
             labels, _ = vq(numpy.array(feature), centroids)
         except:
             continue
         bow = numpy.zeros(FEATURE_TYPES)
         for label in labels:
             bow[label] += 1
         self.addBow(bow)
         helpers.normalize(bow)
         self.bagofwords.append(bow)
Example #9
def sense(color, grid, beliefs, p_hit, p_miss):
    new_beliefs = []

    #
    # TODO - implement this in part 2
    #
    for i in range(len(grid)):
        row=[]
        for j in range(len(grid[0])):
            hit=(color==grid[i][j])
            beliefs[i][j]=(p_hit*hit+(1-hit)*p_miss)*beliefs[i][j]
            row.append(beliefs[i][j])
        new_beliefs.append(row)
    normalize(new_beliefs)
    return new_beliefs
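
The update above multiplies each prior belief by p_hit where the observed color matches the map cell and by p_miss where it does not, then renormalizes so the grid sums to 1. A self-contained sketch of the same step, with an explicit local renormalization standing in for the normalize() helper:

def sense_sketch(color, grid, beliefs, p_hit, p_miss):
    # posterior is proportional to prior * likelihood, computed cell by cell
    new_beliefs = [[beliefs[i][j] * (p_hit if grid[i][j] == color else p_miss)
                    for j in range(len(grid[0]))]
                   for i in range(len(grid))]
    total = sum(sum(row) for row in new_beliefs)
    # renormalize so that all cells sum to 1 (what normalize() is assumed to do)
    return [[cell / total for cell in row] for row in new_beliefs]

grid = [['r', 'g'], ['g', 'r']]
uniform = [[0.25, 0.25], [0.25, 0.25]]
print(sense_sketch('r', grid, uniform, p_hit=0.9, p_miss=0.1))
# -> [[0.45, 0.05], [0.05, 0.45]]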
Example #10
    def test_should_match_single_citation_with_characters_before_number(self):

        digest = (
            "decisao: e meramente processual e infraconstitucional questao relativa a cabimento da acao rescisoria. "
            "disso resulta a inviabilidade do re conforme os precedentes: agrags 259815, 253614, 216735, 214608, "
            "216871, 211226, 214360, 238557, 208060, 311483, dentre outros. alem disso, o re n.º 98765 suscita materia "
            "constitucional (art. 5, ii, art. 7, iii e art. 22, vi, todos da cf/88) que nao foi examinada no acordao "
            "recorrido nem opostos embargos declaratorios res 123, 456, e 789 para sanar a omissao (sumulas 282 e 356). "
            "nego seguimento ao agravo (cpc, art. 557). publique-se. brasilia, 22 de agosto de 2003. ministro nelson jobim relator"
        )

        match = re.search(self.singular.patterns[0], normalize(digest))
        self.assertTrue(match)
        match_string = match.string[match.start():match.end()]
        self.assertEqual(match_string, normalize('re n.º 98765'))
Example #11
    def generate_povray_data(
            self,
            depth=100,
            maxcount=50000,
            cell_depth=None,
            cell_edges=10000,
            filename="./povray/honeycomb-data.inc",
            eye=(0, 0, 0.5),
            lookat=(0, 0, 0),
    ):
        self.G.init()
        self.word_generator = partial(self.G.traverse,
                                      depth=depth,
                                      maxcount=maxcount)
        self.fundamental_cells = self.get_fundamental_cells(
            cell_depth, cell_edges)
        init_edges = self.collect_fundamental_cell_edges()
        bar = tqdm.tqdm(desc="processing edges", total=maxcount)
        vertices = set()
        eye = np.array(eye)
        lookat = np.array(lookat)
        viewdir = helpers.normalize(lookat - eye)

        def add_new_edge(edge):
            p1 = self.project(edge[0])
            p2 = self.project(edge[1])
            if np.dot(p1 - eye, viewdir) > 0.5 or np.dot(p2 - eye,
                                                         viewdir) > 0.5:
                self.export_edge(f, p1, p2)
                self.num_edges += 1
                for v in [p1, p2]:
                    v = vround(v)
                    if v not in vertices:
                        vertices.add(v)
                        self.num_vertices += 1

        with open(filename, "w") as f:
            f.write("#declare camera_loc = {};\n".format(
                helpers.pov_vector(eye)))
            f.write("#declare lookat = {};\n".format(
                helpers.pov_vector(lookat)))
            for edge in init_edges:
                add_new_edge(edge)

            for word in self.word_generator():
                for edge in init_edges:
                    edge = [self.transform(word, v) for v in edge]
                    if self.is_new_edge(edge):
                        add_new_edge(edge)

                bar.update(1)
            bar.close()
            verts = "#declare num_vertices = {};\n"
            verts_coords = "#declare vertices = array[{}]{{{}}};\n"
            print("{} vertices and {} edges generated".format(
                self.num_vertices, self.num_edges))
            f.write(verts.format(self.num_vertices))
            f.write(
                verts_coords.format(self.num_vertices,
                                    helpers.pov_vector_list(vertices)))
Example #12
def get_cancer_data(file_in):
    """
    Fetch the UCI data set on breast cancer characteristics 
    """

    data_text = open(file_in, 'r').read()
    data_rows = data_text.split('\n')
    data_rows = data_rows[0:-1]  #last line is blank

    x_headers = [
        'id', 'clump_thickness', 'unif_cell_size', 'unif_cell_shape',
        'marginal_adhesion', 'single_epithelial_cell_size', 'bare_nuclei',
        'bland_chrmatin', 'normal_nucleoli', 'mitoses', 'target'
    ]

    cat_variables = ['target']

    data_all = [row.split(',') for row in data_rows]

    output = pd.DataFrame(data_all, columns=x_headers)

    output = output.drop(['id'], axis=1)

    output = helpers.replace_missing_mode(output)

    for col in output:
        if col not in cat_variables:
            output[col] = [float(x) for x in output[col]]

    output = helpers.one_hot_encode(output, exclude=[])
    output = helpers.normalize(output)

    return (output)
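
replace_missing_mode, one_hot_encode and normalize are project helpers. A rough sketch of what the final normalization step plausibly does for a numeric DataFrame (a min-max assumption, not the project's actual code):

import pandas as pd

def min_max_normalize(df):
    # Scale every numeric column into [0, 1]; leave constant columns at 0.
    out = df.copy()
    for col in out.select_dtypes(include='number'):
        lo, hi = out[col].min(), out[col].max()
        out[col] = 0.0 if hi == lo else (out[col] - lo) / (hi - lo)
    return out

demo = pd.DataFrame({'clump_thickness': [1, 5, 10], 'mitoses': [1, 1, 3]})
print(min_max_normalize(demo))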
Example #13
def input_transforms(image, with_size, device):
    data = to_tensor(image).to(device)
    big_size = data.shape[-2:]
    downscale = downscaler(big_size, with_size)
    x_n = normalize(data).unsqueeze(0)
    x_nd = downscale(x_n)
    return x_nd, x_n
Example #14
 def _fire(self, position):
     diff_x = position[0] - self.position[0]
     diff_y = position[1] - self.position[1]
     aim = helpers.normalize((diff_x, diff_y))
     l = Bot.fire_dist
     fire_pos = (self.position[0] + aim[0] * l, self.position[1] + aim[1] * l)
     return Pulse(position=fire_pos, direction=aim)
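
Here helpers.normalize is applied to a plain 2-tuple to obtain a unit aiming direction, which is then scaled by the fire distance. A tiny hypothetical stand-in showing that behaviour:

import math

def normalize_2d(vec):
    # Hypothetical stand-in for helpers.normalize on a 2-tuple:
    # return a unit-length vector (and (0, 0) for the zero vector).
    x, y = vec
    length = math.hypot(x, y)
    return (x / length, y / length) if length else (0.0, 0.0)

aim = normalize_2d((3.0, 4.0))                      # -> (0.6, 0.8)
fire_pos = (10 + aim[0] * 5, 10 + aim[1] * 5)       # 5 units ahead of (10, 10)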
Example #15
def get_vote_data(file_in):
    """
    Fetch and clean the UCI data set on US Representative vote records
    """

    data_text = open(file_in, 'r').read()
    data_rows = data_text.split('\n')
    data_rows = data_rows[0:-1]  #last line is blank

    x_headers = [
        'target', 'handicapped-infants', 'water-project-cost-sharing',
        'adoption-of-the-budget-resolution', 'physician-fee-freeze',
        'el-salvador-aid', 'religious-groups-in-schools',
        'anti-satellite-test-ban', 'aid-to-nicaraguan-contras', 'mx-missile',
        'immigration', 'synfuels-corporation-cutback', 'education-spending',
        'superfund-right-to-sue', 'crime', 'duty-free-exports',
        'export-administration-act-south-africa'
    ]

    cat_variables = ['target']

    data_all = [row.split(',') for row in data_rows]

    output = pd.DataFrame(data_all, columns=x_headers)

    output = helpers.replace_missing_mode(output)

    output = helpers.one_hot_encode(output, exclude=[])
    output = helpers.normalize(output)

    return (output)
Example #16
def get_iris_data(file_in):
    """
    Fetch the UCI data set on physical characteristics of Iris species.
    """

    data_text = open(file_in, 'r').read()
    data_rows = data_text.split('\n')
    data_rows = data_rows[0:-2]  #last two lines are blank

    x_headers = [
        'sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'target'
    ]

    cat_variables = ['target']

    data_all = [row.split(',') for row in data_rows]

    output = pd.DataFrame(data_all, columns=x_headers)

    output = helpers.replace_missing_mode(output)

    for col in output:
        print(col)
        if col not in cat_variables:
            output[col] = [float(x) for x in output[col]]

    output = helpers.one_hot_encode(output, exclude=[])
    output = helpers.normalize(output)

    return (output)
Example #17
def create_PNG_dataset(imagePaths, augmentation=True, max_theta=60):

    imgArrays = nii2Numpy(imagePaths)

    if augmentation:
        for subjectImage, path in zip(np.rollaxis(imgArrays,3), imagePaths):
            path = Path(path)
            print("Processing {}".format(path.parents[0].parts[-1]))
            augmentedImg = rotate3D(subjectImage, max_theta)
            augmentedPath =  Path(str(path.parents[0]) + 'a')
            updateSubjectDf(augmentedImg,augmentedPath)


    else:
        for subjectImage, path in zip(np.rollaxis(imgArrays,3), imagePaths):
            path = Path(path)
            print("Processing {}".format(path.parents[0].parts[-1]))
            k=1
            for slice2D in np.rollaxis(subjectImage,2):
                pathOut = path.parent / 'png' / 'slice_{:03d}.png'.format(k)

                # if picture already exists, remove it first
                if pathOut.exists(): safely_remove_file(pathOut)

                slice2D = normalize(slice2D)
                # if sum of pixels is 0, skip slice
                if slice2D.sum() == 0:
                    continue

                create_png(slice2D,pathOut)
                print("Creating 2D slice number: {:03d}".format(k))
                k+=1
Example #18
def get_glass_data(file_in):
    """
    Fetch the UCI data set on the chemical characteristics of glass samples.
    """

    data_text = open(file_in, 'r').read()
    data_rows = data_text.split('\n')
    data_rows = data_rows[0:-1]  #last line is blank

    x_headers = [
        'id', 'RI', 'Na', 'Mg', 'Al', 'Si', 'K', 'Ca', 'Ba', 'Fe', 'target'
    ]

    cat_variables = ['target']

    data_all = [row.split(',') for row in data_rows]

    output = pd.DataFrame(data_all, columns=x_headers)

    output = helpers.replace_missing_mode(output)

    for col in output:
        print(col)
        if col not in cat_variables:
            output[col] = [float(x) for x in output[col]]

    output = output.drop(['id'], axis=1)

    output = helpers.one_hot_encode(output, exclude=[])
    output = helpers.normalize(output)

    return (output)
Example #19
def init_rounds(word):
    won = False
    hanged = False

    tries = 7
    guessed_letters = []
    word = normalize(word)
    mapped_word_positions = map_positions(word)
    hidden_word = [PLACEHOLDER_LETTER for _ in word]

    while (not won and not hanged):
        print_round_start_message(hidden_word, tries)
        guess, guessed_letters, guessed_before = input_guess(guessed_letters)

        if (guessed_before):
            continue

        tries, hidden_word = check_guess(guess, hidden_word,
                                         mapped_word_positions, tries)

        won = PLACEHOLDER_LETTER not in hidden_word
        hanged = tries <= 0

    clear()
    if (won):
        print_victory_message()
    elif (hanged):
        print_defeat_message(word)
Example #20
def attitude_from_alpha_delta(source, sat, t, vertical_angle_dev=0):
    """
    :param source: [Source object]
    :param sat: [satellite object]
    :param t: [float] time
    :param vertical_angle_dev: how much we deviate from zeta
    """
    Cu = source.unit_topocentric_function(sat, t)
    Su = np.array([1, 0, 0])
    if vertical_angle_dev == 0:
        vector, angle = helpers.get_rotation_vector_and_angle(Cu, Su)
        q_out = quaternion.from_rotation_vector(angle * vector)
    else:
        Cu_xy = helpers.normalize(np.array([Cu[0], Cu[1],
                                            0]))  # Cu on S-[xy] plane
        v1, a1 = helpers.get_rotation_vector_and_angle(Cu_xy, Su)
        q1 = quaternion.from_rotation_vector(v1 * a1)

        Su_xy = ft.rotate_by_quaternion(
            q1.inverse(), Su)  # Su rotated to be on same xy than Cu_xy
        v2, a2 = helpers.get_rotation_vector_and_angle(Cu, Su_xy)
        q2_dev = quaternion.from_rotation_vector(v2 *
                                                 (a2 + vertical_angle_dev))
        # deviated_Su = ft.rotate_by_quaternion(q2_dev.inverse(), Su_xy)
        q_out = q1 * q2_dev
        # angle -= 0.2
    return q_out
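
helpers.get_rotation_vector_and_angle(a, b) presumably returns the unit axis and the angle that rotate a onto b; a common way to compute such a pair looks like the hedged sketch below (independent of this project's helpers):

import numpy as np

def rotation_vector_and_angle(a, b):
    # Unit rotation axis and angle that bring unit vector a onto unit vector b.
    a = a / np.linalg.norm(a)
    b = b / np.linalg.norm(b)
    axis = np.cross(a, b)
    norm = np.linalg.norm(axis)
    angle = np.arctan2(norm, np.dot(a, b))
    # For (anti)parallel vectors the axis is ill-defined; fall back to an arbitrary axis.
    return (axis / norm if norm > 1e-12 else np.array([0.0, 0.0, 1.0])), angle

axis, angle = rotation_vector_and_angle(np.array([1.0, 0.0, 0.0]),
                                        np.array([0.0, 1.0, 0.0]))
print(axis, angle)   # -> [0. 0. 1.] 1.5707963...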
Example #21
    def decodeCoordinate(self, memory=None, dim=1, return_list=False, suppress_value=None, decode_range=None):

        assert(dim == 1 or dim == 2 or dim == 3)

        if memory is None:
            memory = self.memory

        memory = helpers.normalize(memory)

        if decode_range is None:
            decode_range = self.valid_range
        if decode_range is None:
            raise ValueError("Decoding scalar values requires valid range (valid_range or decode_range parameter)")



        assert(len(decode_range) == dim)

        memory = self.reverse_permute(memory)

        if self.visualize:
            print("Output Reverse:")
            self.plot(np.reshape(memory,self.size))

        memory = helpers.smooth(helpers.reShape(memory, dim),self.window_ratio)
        l = helpers.sideLength(memory.size, dim)

        if self.visualize:
            print("Output Smooth pre:")
            self.plot(np.reshape(memory,self.size))

        if suppress_value is not None:
            memory = self.deductValue(memory, suppress_value, HRR.valid_range)
            if self.visualize:
                print("Output Smooth (after suppression):")
                self.plot(np.reshape(memory,self.size))

        result = []

        if(self.peak_min == 0):
            self.peak_min = np.max(memory)/2

        while np.max(memory) > self.peak_min_ratio * abs(np.mean(memory)) + self.peak_min:

            spot = list(np.unravel_index(np.argmax(memory),memory.shape))

            for i in range(dim):
                spot[i] = helpers.reverse_scale(spot[i], l, decode_range[i])

            result.append((spot, 1))
            if return_list is False:
                return spot
            memory = self.deductValue(memory,spot,HRR.valid_range,dim, np.max(memory))
            if self.visualize:
                print("Output Post Deduction:")
                self.plot(np.reshape(memory,self.size))

        if len(result) == 0 and suppress_value is not None:
            return [(np.nan, 1)] if return_list else np.nan
        return result
Example #22
def get_soy_data(file_in):
    """
    Fetch the UCI data set on diseases of soybean samples.
    """

    data_text = open(file_in, 'r').read()
    data_rows = data_text.split('\n')
    data_rows = data_rows[0:-1]  #last line is blank

    x_headers = [
        'date', 'plant-stand', 'precip', 'temp', 'hail', 'crop-hist',
        'area-damaged', 'severity', 'seed-tmt', 'germination', 'plant-growth',
        'leaves', 'leafspots-halo', 'leafspots-marg', 'leafspot-size',
        'leaf-shread', 'leaf-malf', 'leaf-mild', 'stem', 'lodging',
        'stem-cankers', 'canker-lesion', 'fruiting-bodies', 'external decay',
        'mycelium', 'int-discolor', 'sclerotia', 'fruit-pods', 'fruit spots',
        'seed', 'mold-growth', 'seed-discolor', 'seed-size', 'shriveling',
        'roots', 'target'
    ]

    cat_variables = ['target']

    data_all = [row.split(',') for row in data_rows]

    output = pd.DataFrame(data_all, columns=x_headers)

    output = helpers.replace_missing_mode(output)

    output = helpers.one_hot_encode(output, exclude=[])
    output = helpers.normalize(output)

    return (output)
Example #23
 def test_not_in_range(self) -> None:
     """Tests when the number is not in range."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "999", "Budaörsi út",
                                       normalizers)
     self.assertEqual(house_numbers, [])
Example #24
def normalize(raw_text):
    """
    Should return the normalized text given the raw text
    """
    normalized_text = helpers.normalize(raw_text)
    normalized_text = helpers.cleaning_and_removal(normalized_text)

    return normalized_text
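
A plausible minimal version of a text normalization step like the one wrapped here (lower-casing, accent stripping and whitespace collapsing; purely illustrative, not this project's helpers.normalize):

import re
import unicodedata

def normalize_text(raw_text):
    # Lower-case, strip diacritics, and collapse runs of whitespace.
    text = unicodedata.normalize('NFKD', raw_text)
    text = ''.join(ch for ch in text if not unicodedata.combining(ch))
    return re.sub(r'\s+', ' ', text.lower()).strip()

print(normalize_text('  Decisão:   é meramente PROCESSUAL  '))
# -> 'decisao: e meramente processual'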
Example #25
def preprocessData(dataFilePath, mode):
    """Load data from a file, process and return indices, conversations and labels in separate lists
        Input:
            dataFilePath : Path to train/test file to be processed
            mode : "train" mode returns labels. "test" mode doesn't return labels.
        Output:
            indices : Unique conversation ID list
            conversations : List of 3 turn conversations, processed and each turn separated by the <eos> tag
            labels : [Only available in "train" mode] List of labels
    """
    indices = []
    conversations = []
    labels = []
    u1 = []
    u2 = []
    u3 = []
    with io.open(dataFilePath, encoding="utf8") as finput:
        finput.readline()
        for line in finput:
            line = line.strip().split('\t')
            line[1] = normalize(line[1])
            line[2] = normalize(line[2])
            line[3] = normalize(line[3])

            if mode == "train":
                # Train data contains id, 3 turns and label
                label = emotion2label[line[4]]
                labels.append(label)

            conv = ' '.join(line[1:4])

            u1.append(line[1])
            u2.append(line[2])
            u3.append(line[3])

            # Remove any duplicate spaces
            duplicateSpacePattern = re.compile(r'\ +')
            conv = re.sub(duplicateSpacePattern, ' ', conv)

            indices.append(int(line[0]))
            conversations.append(conv)

    if mode == "train":
        return indices, conversations, labels, u1, u2, u3
    else:
        return indices, conversations, u1, u2, u3
Example #26
def see_imgs():
    X, y = read_data()
    X = normalize(X, 0, 255)
    y = normalize(y, 0, 255)
    X = X.reshape(-1, INPUT_SIZE, INPUT_SIZE)
    write_imgs_serially(X, base_path='tmp/all/input/')
    y = y.reshape(-1, OUTPUT_SIZE, OUTPUT_SIZE)
    write_imgs_serially(y, base_path='tmp/all/output/')
    del X, y

    X, y = read_random_pattern_data()
    X = normalize(X, 0, 255)
    y = normalize(y, 0, 255)
    X = X.reshape(-1, INPUT_SIZE, INPUT_SIZE)
    write_imgs_serially(X, base_path='tmp/all/rp_input/')
    y = y.reshape(-1, OUTPUT_SIZE, OUTPUT_SIZE)
    write_imgs_serially(y, base_path='tmp/all/rp_output/')
Example #27
 def test_separator_interval_parity(self) -> None:
     """Tests the 5-8 case: means just 5 and 8 as the parity doesn't match."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "5-8", "Budaörs út",
                                       normalizers)
     self.assertEqual([i.get_number() for i in house_numbers], ["5", "8"])
Example #28
 def test_happy(self) -> None:
     """Tests the happy path."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "139", "Budaörsi út",
                                       normalizers)
     self.assertEqual([i.get_number() for i in house_numbers], ["139"])
Example #29
 def test_not_a_number(self) -> None:
     """Tests the case when the house number is not a number."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "x", "Budaörsi út",
                                       normalizers)
     self.assertEqual(house_numbers, [])
Example #30
 def test_separator_semicolon(self) -> None:
     """Tests the case when ';' is a separator."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "1;2", "Budaörs út",
                                       normalizers)
     self.assertEqual([i.get_number() for i in house_numbers], ["1", "2"])
Example #31
 def test_nofilter(self) -> None:
     """Tests the case when there is no filter for this street."""
     relations = get_relations()
     relation = relations.get_relation("gazdagret")
     normalizers = relation.get_street_ranges()
     house_numbers = helpers.normalize(relation, "1", "Budaörs út",
                                       normalizers)
     self.assertEqual([i.get_number() for i in house_numbers], ["1"])
Example #32
 def motifs_in_region(self, region):
     seq = self.current_seq
     if self.method == 'motility':
         for hit in self.pwm.find(seq, threshold=self.jaspar_thresh):
             start, stop, strand, seq = hit
             yield helpers.normalize(self._hit_to_interval(hit, region), region)
     if self.method == 'biopython':
         seq_seq = Seq(seq)
         motif_len = len(self.motif)
         for pos, score in self.motif.search_pwm(seq_seq, threshold=self.jaspar_thresh):
             strand = '+'
             if pos < 0:
                 strand = '-'
                 pos = -pos
             start = pos
             stop = start + motif_len
             hit = (start, stop, strand, seq[start:stop])
             yield helpers.normalize(self._hit_to_interval(hit, region), region)
Example #33
def manage_roles(edit_role_name=None):
    settings = check_and_initialize()
    form = helpers.deploy_custom_form('manage_roles')
    if request.method == 'POST' and form.validate_on_submit():
        role_name = helpers.slug(request.form.get('display_name'))
        existing_role = g.db.settings.find_one(
            {
                'roles.name': role_name
            }
        )
        if existing_role:
            flash(
                'Role already exists, please check the name and try again',
                'error'
            )
            form.display_name.errors.append('Duplicate role')
            return render_template(
                'admin/manage_roles.html',
                form=form,
                roles=settings.get('roles')
            )
        else:
            g.db.settings.update(
                {
                    '_id': settings.get('_id')
                }, {
                    '$push': {
                        'roles': {
                            'name': role_name,
                            'display_name': helpers.normalize(
                                request.form.get('display_name')
                            ),
                            'active': bool(request.form.get('status'))
                        }
                    }
                }
            )
            flash('Role successfully Added', 'success')
            return redirect(url_for('adminblueprint.manage_roles'))
    elif request.method == 'POST' and not (form.validate_on_submit()):
        flash(
            'Form validation failed. Please check the form and try again',
            'error'
        )
        return render_template(
            'admin/manage_roles.html',
            form=form,
            roles=settings.get('roles')
        )
    else:
        return render_template(
            'admin/manage_roles.html',
            form=form,
            roles=settings.get('roles')
        )
Example #34
 def annotations(self, region):
     x = pybedtools.BedTool([region]).saveas()
     for symbol, annot in self._annotations.items():
         for hit in annot.intersect(x):
             hit = helpers.normalize(hit, region)
             match = [
                 '.' * hit.start,
                 symbol * len(hit),
                 '.' * (len(region) - hit.stop)
             ]
             yield ''.join(match)
Example #35
def heatmapf(func, scale=10, boundary=True, cmap=None, ax=None,
             scientific=False, style='triangular', colorbar=True,
             permutation=None):
    """
    Computes func on heatmap partition coordinates and plots heatmap. In other
    words, computes the function on lattice points of the simplex (normalized
    points) and creates a heatmap from the values.

    Parameters
    ----------
    func: Function
        A function of 3-tuples to be heatmapped
    scale: Integer
        The scale used to partition the simplex
    boundary: Bool, True
        Include the boundary points or not
    cmap: String, None
        The name of the Matplotlib colormap to use
    ax: Matplotlib axis object, None
        The axis to draw the colormap on
    style: String, "triangular"
        The style of the heatmap, "triangular", "dual-triangular" or "hexagonal"
    scientific: Bool, False
        Whether to use scientific notation for colorbar numbers.
    colorbar: bool, True
        Show colorbar.
    permutation: string, None
        A permutation of the coordinates

    Returns
    -------
    ax, The matplotlib axis
    """

    # Apply the function to a simplex partition
    data = dict()
    for i, j, k in simplex_iterator(scale=scale, boundary=boundary):
        data[(i, j)] = func(normalize([i, j, k]))
    # Pass everything to the heatmapper
    ax = heatmap(data, scale, cmap=cmap, ax=ax, style=style,
                 scientific=scientific, colorbar=colorbar,
                 permutation=permutation)
    return ax
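
Assuming the surrounding module supplies simplex_iterator, heatmap and normalize (the signature and docstring match python-ternary's heatmapf), a direct call might look like this hedged sketch:

import math
import matplotlib.pyplot as plt

def shannon_entropy(p):
    # p is a normalized 3-tuple on the simplex.
    return -sum(x * math.log(x) for x in p if x > 0)

ax = heatmapf(shannon_entropy, scale=30, boundary=True,
              cmap="viridis", style="triangular")
plt.show()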
Example #36
    def train(self, train_dir, test_dir, dataset_path=None, dump_dataset=True):
        testset = SupervisedDataSet(len(self.summarizer.get_features()), 1)
        min_maxs = [[100, 0] for i in range(len(self.summarizer.get_features()))]

        if dataset_path and dataset_path != 'None':
            dataset = load_from_file(dataset_path)
            min_maxs = load_from_file("meta_model.xml")  # check the path!
        else:
            dataset = SupervisedDataSet(len(self.summarizer.get_features()), 1)

            for root, dirs, files in os.walk(train_dir, topdown=False):
                for file_ds in self.process_dir(self.summarizer, root, files):
                    for ds in file_ds:
                        dataset.addSample(ds[0], ds[1])
                        min_maxs = self.update_min_maxs(min_maxs, ds[0])
                # break  # remove this !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                # print min_maxs

            inp = []
            for d in dataset['input']:
                inp.append([normalize(val, min_maxs[i][0], min_maxs[i][1]) for i, val in enumerate(d)])

            dataset.setField("input", inp)
            # print dataset['input']


        ### TEMP
        # save_dataset_as_csv(dataset)

        if dump_dataset:
            save_to_file(dataset, "dataset.xml")

        if test_dir:
            for root, dirs, files in os.walk(test_dir, topdown=False):
                for file_ds in self.process_dir(self.summarizer, root, files):
                    for ds in file_ds:
                        testset.addSample(ds[0], ds[1])

        print "[Trainer] -> training..."
        save_to_file(min_maxs, self.features.replace("features.txt", "meta_model.xml"))

        self.train_method(self.summarizer, dataset, testset, self.features.replace("features.txt", "model.xml"))
Example #37
    def _get_clean_html(self,token=True):
        cleaned = nltk.clean_html(self.text)

#        if self.debug:
#            print cleaned

        normalized = normalize(cleaned)

#        if self.debug:
#            print normalized

        if token:
            tok_text = tokenize(normalized)

#            if self.debug:
#                print tok_text

            return tok_text
        else:
            return normalized
Example #38
 def _create_methods(klass, jsn):
     """A helper method that will populate this module's namespace
     with methods (parsed directly from the Wordnik API's output)
     """
     endpoints = jsn['endPoints']
 
     for method in endpoints:
         path = method['path']
         for op in method['operations']:
             summary = op['summary']
             httpmethod = op['httpMethod']
             params = op['parameters']
             response = op['response']
 
             ## a path like: /user.{format}/{username}/wordOfTheDayList/{permalink} (GET)
             ## will get translated into method: user_get_word_of_the_day_list
             methodName  = helpers.normalize(path, httpmethod.lower())
             docs        = helpers.generate_docs(params, response, summary, path)
             method      = helpers.create_method(methodName, docs, params, path, httpmethod.upper())
             
             setattr( Wordnik, methodName, method )
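
Here helpers.normalize(path, httpmethod) turns an API path plus HTTP verb into a Python method name; per the comment above, /user.{format}/{username}/wordOfTheDayList/{permalink} with GET becomes user_get_word_of_the_day_list. A rough standalone sketch of that kind of transformation (illustrative only, not the Wordnik client's actual helper):

import re

def path_to_method_name(path, http_method):
    # Keep the leading resource, drop parameter segments, convert the
    # remaining camelCase segments to snake_case, and insert the
    # lower-cased HTTP verb after the resource name.
    segments = [s for s in path.split('/') if s and not s.startswith('{')]
    resource = segments[0].split('.')[0]                  # 'user.{format}' -> 'user'
    rest = [re.sub(r'(?<!^)(?=[A-Z])', '_', s).lower() for s in segments[1:]]
    return '_'.join([resource, http_method.lower()] + rest)

print(path_to_method_name('/user.{format}/{username}/wordOfTheDayList/{permalink}', 'GET'))
# -> 'user_get_word_of_the_day_list'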
Example #39
 def eval_input(self, inputs):
     up_val, up_ar, extra_inputs = 0.0, 0.0, {}
     for input_type in self.input_coeff.keys():
         minmax = self.input_minmax[input_type]
         try:
             input_val = inputs[input_type]
         except KeyError:
             input_val = self.__dict__.get(input_type)()
             extra_inputs[input_type] = input_val
         input_norm = helpers.normalize(input_val, minmax[0],minmax[1])
         # make the interval symmetric [0,1] -> [-0.5,0.5]
         input_norm -= 0.5
         # scale the interval
         up_val += (input_norm * self.input_coeff[input_type][0])
         if isinstance(self.input_coeff[input_type][1],str):
             f_coeff = float(self.input_coeff[input_type][1].replace('f',''))
             up_ar += input_norm * f_coeff
         else:
             if (self.last_inputs):
                 diff = input_val - self.last_inputs[input_type]
                 # amplify differences:
                 # in a [0,100] range a change of 100/sensibility is enough
                 # to reach max possible arousal caused by the selected sensed value
                 try:
                     sensibility = self.input_coeff[input_type][2]
                 except IndexError:
                     sensibility = 1
                 diff = 0.5 * sensibility * ( float(diff) / (minmax[1] - minmax[0]))
                 # saturates
                 if (diff < -0.5):
                     diff = -0.5
                 elif (diff > 0.5):
                     diff = 0.5
                 up_ar += (diff * self.input_coeff[input_type][1])
     self.last_inputs = inputs.copy()
     self.last_inputs.update(extra_inputs)
     self.eval_state[0] = up_val
     self.eval_state[1] = up_ar
Example #40
def run():
    input_data = simulation_conf["input_data"]
    n_cycles = len(input_data[list(input_data.keys())[0]])
    plot_data, added_obj_plot_data = {}, {}
    for plot_name in simulation_conf["plot"]:
        plot_data[plot_name], added_obj_plot_data[plot_name] = {}, False
        for y_data in simulation_conf["plot"][plot_name]:
            plot_data[plot_name][y_data[0]] = [[],y_data[1]]
    for i in range(n_cycles):
        for plot_name in added_obj_plot_data:
            added_obj_plot_data[plot_name] = False
        cycle_input = dict((input_type,input_data[input_type][i]) for input_type in input_data)
        for obj in net_objects_list:
            obj.eval_input(cycle_input)
        for obj in net_objects_list:
            obj.update_state()
            if obj.name in simulation_conf["plot"].keys() and not added_obj_plot_data[obj.name]:
                added_obj_plot_data[obj.name] = True
                for y_data in simulation_conf["plot"][obj.name]:
                    if y_data[0] == 'valence':
                        to_app = obj.state[0]
                    elif y_data[0] == 'arousal':
                        to_app = obj.state[1]
                    else:
                        n_min,n_max = simulation_conf["minmax"][y_data[0]][0],simulation_conf["minmax"][y_data[0]][1]
                        to_app = helpers.normalize(obj.last_inputs[y_data[0]],n_min,n_max)
                    plot_data[obj.name][y_data[0]][0].append(to_app)
    i = 1
    for plot_name in plot_data:
        plt.title(plot_name)
        for y_label in plot_data[plot_name]:
            plt.plot(*plot_data[plot_name][y_label],label=y_label)
        plt.axis([0,n_cycles-1,-0.5,1.5])
        plt.legend()
        if i != len(simulation_conf["plot"].keys()):
            plt.figure()
        i += 1
    plt.show()
Example #41
for filename in os.listdir(args.rdatdir):
    if not os.path.isdir(args.rdatdir+'/'+filename):
        print filename
    rdat = RDATFile()
    rdat.load(open(args.rdatdir+'/'+filename))
    for cname in rdat.constructs:
        construct = rdat.constructs[cname]
        struct = SecondaryStructure(construct.structure)
        frags = struct.explode()
        for data in construct.data:
            if (('mutation' not in data.annotations) or \
                    ('mutation' in data.annotations and \
                    'WT' in data.annotations['mutation'])):
                if 'modifier' in data.annotations:
                    if args.normalize:
                        normvals = normalize(data.values)
                    else:
                        normvals = data.values
                        iqr = scoreatpercentile(normvals, 75) - scoreatpercentile(normvals, 25)
            for fragtype in frags:
                db['all'].extend(normvals)
                if data.errors:
                    db['all'].extend(data.errors)
                dbidx['all'] = dict([((construct.name, construct.seqpos[i]), v) for i, v in enumerate(normvals)])
                fraglist = frags[fragtype]
                for frag in fraglist:
                    vals = []
                    valerrors = []
                    pos = []
                    for idx in frag:
                        try:
Example #42
 def orientation(self):
     rotation = math.radians(self.rotation)
     x = math.sin(rotation)
     y = math.cos(rotation)
     normalized = helpers.normalize((x, y))
     return normalized
Example #43
def menu_settings(edit_menu_name=None):
    error = True
    settings = check_and_initialize()
    menu_list = help.get_and_sort(
        settings.get('menu'),
        'parent_order',
        'order'
    )
    top_level_menu = help.get_and_sort(
        settings.get('top_level_menu'),
        'order'
    )

    if edit_menu_name:
        menus = settings.get('menu')
        menu_edit = None
        for item in menus:
            if item.get('name') == edit_menu_name:
                menu_edit = item
                break

        if menu_edit:
            title = "Edit Menu Settings for %s" % \
                help.unslug(edit_menu_name)
            menu_form = help.deploy_custom_form(
                'menu_items_form',
                parent_menu=menu_edit.get('parent'),
                menu_display_name=menu_edit.get('display_name'),
                menu_item_url=menu_edit.get('url'),
                menu_permissions=menu_edit.get('view_permissions'),
                menu_item_status=menu_edit.get('active'),
                db_name=menu_edit.get('name'),
                action='edit'
            )
        else:
            title = "Application Menu Settings"
            menu_form = help.deploy_custom_form('menu_items_form')
            edit_menu_name = None
    else:
        title = "Application Menu Settings"
        menu_form = help.deploy_custom_form('menu_items_form')

    parent_menus = help.generate_parent_menu(settings.get('menu'))
    menu_form.parent_menu.choices = [
        (parent, parent) for parent in parent_menus
    ]

    active_roles = help.generate_active_roles(settings.get('roles'))
    menu_form.menu_permissions.choices = [
        (help.slug(role), role) for role in active_roles
    ]
    if request.method == 'POST' and menu_form.validate_on_submit():
        db_name = help.slug(
            str(request.form.get('db_name'))
        )
        existing_name = g.db.settings.find_one(
            {
                'menu.name': db_name
            }
        )
        if existing_name:
            if not (edit_menu_name and (menu_edit.get('name') == db_name)):
                flash(
                    'Name already exists, please choose another name',
                    'error'
                )
                return render_template(
                    'admin/manage_menu.html',
                    title=title,
                    menu_form=menu_form,
                    menu_list=menu_list,
                    top_level_menu=top_level_menu,
                    error=error
                )

        existing_url = g.db.settings.find_one(
            {
                'menu.url': request.form.get('menu_item_url')
            }
        )
        if existing_url:
            if not (edit_menu_name and
                    menu_edit.get('url') == request.form.get('menu_item_url')):
                flash(
                    'URL is already being used, '
                    'please check the URL and try again',
                    'error'
                )
                return render_template(
                    'admin/manage_menu.html',
                    title=title,
                    menu_form=menu_form,
                    menu_list=menu_list,
                    top_level_menu=top_level_menu,
                    error=error
                )

        if request.form.get('parent_menu') == "Add New Parent":
            if request.form.get('new_parent'):
                existing_parent = g.db.settings.find_one(
                    {
                        'top_level_menu.slug': help.slug(
                            request.form.get('new_parent')
                        )
                    }
                )
                if existing_parent:
                    flash(
                        'Parent is already in use, '
                        'please check the value and try again',
                        'error'
                    )
                    return render_template(
                        'admin/manage_menu.html',
                        title=title,
                        menu_form=menu_form,
                        menu_list=menu_list,
                        top_level_menu=top_level_menu,
                        error=error
                    )
                parent_menu = help.normalize(request.form.get('new_parent'))
            else:
                flash(
                    'New Parent cannot be blank when adding a new Parent Item',
                    'error'
                )
                return render_template(
                    'admin/manage_menu.html',
                    title=title,
                    menu_form=menu_form,
                    menu_list=menu_list,
                    top_level_menu=top_level_menu,
                    error=error
                )
        else:
            parent_menu = help.normalize(request.form.get('parent_menu'))

        status = False
        if request.form.get('menu_item_status'):
            status = True

        if edit_menu_name:
            g.db.settings.update(
                {
                    'menu.name': edit_menu_name
                }, {
                    '$set': {
                        'menu.$.name': db_name,
                        'menu.$.display_name': help.normalize(
                            request.form.get('menu_display_name')
                        ),
                        'menu.$.url': request.form.get('menu_item_url'),
                        'menu.$.view_permissions': request.form.get(
                            'menu_permissions'
                        ),
                        'menu.$.active': status,
                        'menu.$.parent': help.slug(parent_menu),
                        'menu.$.parent_order': help.get_parent_order(
                            parent_menu,
                            settings,
                            request.form.get('menu_display_name')
                        )
                    }
                }
            )
            if (
                (
                    menu_edit.get('display_name') != help.normalize(
                        request.form.get('menu_display_name')
                    )
                ) or (
                    menu_edit.get('parent') != help.slug(parent_menu)
                )
            ):
                help.check_top_level_to_remove(menu_edit)
            flash('Menu Item was edited successfully')
        else:
            g.db.settings.update(
                {
                    '_id': settings.get('_id')
                }, {
                    '$push': {
                        'menu': {
                            'name': db_name,
                            'display_name': help.normalize(
                                request.form.get('menu_display_name')
                            ),
                            'url': request.form.get('menu_item_url'),
                            'view_permissions': request.form.get(
                                'menu_permissions'
                            ),
                            'active': status,
                            'parent': help.slug(parent_menu),
                            'order': help.get_next_order_number(
                                menu_list, parent_menu
                            ),
                            'parent_order': help.get_parent_order(
                                parent_menu,
                                settings,
                                request.form.get('menu_display_name')
                            )
                        }
                    }
                }
            )
            flash('Menu Item successfully Added')

        return redirect(url_for('adminblueprint.menu_settings'))
    elif request.method == 'POST' and not (menu_form.validate_on_submit()):
        flash(
            'Form validation failed. Please check the form and try again',
            'error'
        )
        return render_template(
            'admin/manage_menu.html',
            title=title,
            menu_form=menu_form,
            menu_list=menu_list,
            top_level_menu=top_level_menu,
            error=error
        )
    else:
        if edit_menu_name:
            return render_template(
                'admin/_edit_settings_menu.html',
                menu_form=menu_form,
                name=menu_edit.get('name')
            )
        else:
            return render_template(
                'admin/manage_menu.html',
                title=title,
                menu_form=menu_form,
                menu_list=menu_list,
                top_level_menu=top_level_menu
            )
Example #44
    def _get_title_and_desc(self,token=True):
        try:
            soup = BeautifulSoup(self.text, convertEntities=BeautifulSoup.HTML_ENTITIES)
        except:
            t,v,tb = sys.exc_info()
            l = traceback.format_exception(t, v,tb)
            if self.debug:
                print "".join(l)
            del t
            del v
            del tb
            return {
                "title": [],
                "description": []
            }

        title =""
        try:
            title = smart_str(soup.title.text)
            if self.debug:
                print "\n\ntitle :"
                print title
        except:
            t,v,tb = sys.exc_info()
            l = traceback.format_exception(t, v,tb)
            if self.debug:
                print "".join(l)
            del t
            del v
            del tb
            pass

        desc=""
        try:
            d = pq(self.text)
            desc = d('meta').filter("[name=description]").attr('content')
            if self.debug:
                print "\n\ndescription :"
                print desc
        except:
            t,v,tb = sys.exc_info()
            l = traceback.format_exception(t, v,tb)
            if self.debug:
                print "".join(l)
            del t
            del v
            del tb
            pass

        if token:
            tok_title = []
            tok_desc = []
            if title and len(title):
                if self.debug:
                    print 'in the title branch'
                tok_title = tokenize(normalize(title))
                if self.debug:
                    print str(type(tok_title))
                    print tok_title
            else:
                if self.debug:
                    print 'in the title else branch T_T'
                pass

            if desc and len(desc):
                if self.debug:
                    print 'in the desc branch'
                tok_desc = tokenize(normalize(desc))
                if self.debug:
                    print str(type(tok_desc))
                    print tok_desc
            else:
                if self.debug:
                    print 'in the desc else branch T_T'
                pass

            if self.debug:
                print tok_title
                print tok_desc

            tok_title.extend(tok_desc)
            if self.debug:
                print "retour token title desc : \n" + str(type(tok_title))
            return tok_title

        else:
            return {
                "title": title,
                "description": desc
            }
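
The method above is Python 2 and mixes BeautifulSoup with pyquery; a compact Python 3 sketch of the same title/meta-description extraction using only bs4 (an alternative formulation, not the original code):

from bs4 import BeautifulSoup

def get_title_and_desc(html):
    # Extract <title> text and the content of <meta name="description">.
    soup = BeautifulSoup(html, 'html.parser')
    title = soup.title.get_text(strip=True) if soup.title else ''
    meta = soup.find('meta', attrs={'name': 'description'})
    desc = meta.get('content', '') if meta else ''
    return {'title': title, 'description': desc}

html = '<html><head><title>Hi</title>' \
       '<meta name="description" content="demo page"></head></html>'
print(get_title_and_desc(html))   # -> {'title': 'Hi', 'description': 'demo page'}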
Example #45
program_desc = 'Perform K Nearest Neighbors Algorithm on Fruit Data'
parser = ArgumentParser(description=program_desc)
parser.add_argument('k', type=int, help='K-Value for K Nearest Neighbors Algorithm')
args = parser.parse_args()
k = int(args.k)

# Use pandas read_csv method to store data
training_df = pd.read_csv('../Data/fruit.csv')
test_df = pd.read_csv('../Data/testFruit.csv')

# Function to create a 'KNNReadable' object
get_knn_readable = lambda v: KNNReadable([float(v[0]),float(v[1]),float(v[2]),float(v[3])],v[4])

# Store all records as 'KNNReadable' objects
training_data = normalize([get_knn_readable(v) for v in training_df.values])
test_data = normalize([get_knn_readable(v) for v in test_df.values])

if k < 0:
    print 'Invalid Argument'

else:
    if k > len(training_data):
        print 'Not enough elements in training set for specified k value, setting k to size of training set.'
        k = len(training_data)

    # Iterate through test data and classify
    for test_obj in test_data:
        k = len(training_data) if k == 0 else k
        closest_k = get_closest_k(test_obj,training_data,k)
        test_obj.guess = vote_by_neighbor_weights(test_obj,closest_k)