Example #1
def test_kpoints_write_explicit(kpoints_parser_explicit, tmpdir):
    """Test read, write and read KPOINTS in explicit mode.

    """

    kpoints = kpoints_parser_explicit.get_dict()
    temp_file = str(tmpdir.join('KPOINTSEXP'))
    kpoints_parser_explicit.write(file_path=temp_file)
    kpoints_parser_explicit_temp = Kpoints(file_path=temp_file)
    kpoints_temp = kpoints_parser_explicit_temp.get_dict()
    assert kpoints_temp['mode'] == 'explicit'
    assert kpoints_temp['comment'] == 'Example file'
    assert kpoints_temp['divisions'] is None
    assert kpoints_temp['shifts'] is None
    assert kpoints_temp['centering'] is None
    points = kpoints_temp['points']
    assert len(points) == 4
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.0, 0.0, 0.5]))
    np.testing.assert_allclose(points[2][0], np.array([0.0, 0.5, 0.5]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.5, 0.5]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 2.0)
    assert utils.isclose(points[3][1], 4.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    assert kpoints_temp['tetra'] == [[6, 1, 2, 3, 4]]
    assert utils.isclose(kpoints_temp['tetra_volume'], 0.183333333333333)
Example #2
def test_kpoints_params_explicit(kpoints_parser_explicit):
    """Check parameters in KPOINTS for explicit generation.

    """

    kpoints = kpoints_parser_explicit.get_dict()
    assert kpoints['mode'] == 'explicit'
    assert kpoints['comment'] == 'Example file'
    assert kpoints['divisions'] is None
    assert kpoints['shifts'] is None
    assert kpoints['centering'] is None
    points = kpoints['points']
    assert len(points) == 4
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.0, 0.0, 0.5]))
    np.testing.assert_allclose(points[2][0], np.array([0.0, 0.5, 0.5]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.5, 0.5]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 2.0)
    assert utils.isclose(points[3][1], 4.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    assert kpoints['tetra'] == [[6, 1, 2, 3, 4]]
    assert utils.isclose(kpoints['tetra_volume'], 0.183333333333333)
Example #3
    def obtener_resultado(self, min, max=None, interval=None, velocidad=None):
        if max is None:
            max = min
            min = 0

        tiempo_aproximado = None
        tiempo_real = None

        for tiempo in utils.frange(min, max, interval):
            self.velocidad_anterior.append(
                self.obtener_velocidad_aproximada(tiempo))
            self.velocidad_anterior_real.append(self.obtener_velocidad(tiempo))
            self.tiempo_anterior.append(tiempo)

            if utils.isclose(self.obtener_velocidad_aproximada(tiempo),
                             velocidad,
                             rel_tol=0.001) and tiempo_aproximado is None:
                tiempo_aproximado = tiempo

            if utils.isclose(self.obtener_velocidad(tiempo),
                             velocidad,
                             rel_tol=0.001) and tiempo_real is None:
                tiempo_real = tiempo

            if tiempo_real is not None and tiempo_aproximado is not None:
                return [tiempo_aproximado, tiempo_real]
Example #4
 def ta_form(self, timestamp, price):
     row = self.get_row_data_timestamp(timestamp)
     slowk, slowd = row['slowk'], row['slowd']
     if slowd > 80:
         self.form = 'overbuy'
     elif slowd < 20:
         self.form = 'oversell'
     else:
         self.form = ''
         df = self.get_data_timestamp(timestamp)
         if df.index.size < 3:
             return
         dif = df['slowk'] - df['slowd']
         if isclose(dif.iloc[-1], 0):
             self.form = 'crossing' ### need forecast next form?
         if dif.iloc[-1] > 0: #check if crossed up
             if dif.iloc[-2] < 0 or isclose(dif.iloc[-2], 0):
                 self.form = 'crossup'
             else:
                 self.form = 'up'  # continues up
         elif dif.iloc[-1] < 0: #check if crossed down
             if dif.iloc[-2] > 0 or isclose(dif.iloc[-2], 0):
                 self.form = 'crossdown'
             else:
                 self.form = 'down'  # continues down
     return self.form
Example #5
def test_xml_energies(xml_parser):
    """Check energies.

    """
    import numpy as np

    # Only one ionic step
    xml_data = xml_parser()
    energy = xml_data.get_energies('initial')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_data.get_energies('final')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_data.get_energies('all')[0]
    assert utils.isclose(energy, -43.312106219999997)

    # Ionic steps
    xml_data = xml_parser(filename='basicrelax.xml')
    energy = xml_data.get_energies('initial')[0]
    assert utils.isclose(energy, -42.91113348)
    energy = xml_data.get_energies('final')[0]
    assert utils.isclose(energy, -43.39087657)
    energy = xml_data.get_energies('all')
    test_array = np.array([
        -42.91113348, -43.27757545, -43.36648855, -43.37734069, -43.38062479,
        -43.38334165, -43.38753003, -43.38708193, -43.38641449, -43.38701639,
        -43.38699488, -43.38773717, -43.38988315, -43.3898822, -43.39011239,
        -43.39020751, -43.39034244, -43.39044584, -43.39087657
    ])
    assert np.allclose(np.array(energy), test_array)
Example #6
 def ta_form(self, timestamp, price):
     row = self.get_row_data_timestamp(timestamp)
     slowk, slowd = row['slowk'], row['slowd']
     if slowd > 80:
         self.form = 'overbuy'
     elif slowd < 20:
         self.form = 'oversell'
     else:
         self.form = ''
         df = self.get_data_timestamp(timestamp)
         if df.index.size < 3:
             return
         dif = df['slowk'] - df['slowd']
         if isclose(dif.iloc[-1], 0):
             self.form = 'crossing'  ### need forecast next form?
         if dif.iloc[-1] > 0:  #check if crossed up
             if dif.iloc[-2] < 0 or isclose(dif.iloc[-2], 0):
                 self.form = 'crossup'
             else:
                 self.form = 'up'  # continues up
         elif dif.iloc[-1] < 0:  #check if crossed down
             if dif.iloc[-2] > 0 or isclose(dif.iloc[-2], 0):
                 self.form = 'crossdown'
             else:
                 self.form = 'down'  # continues down
     return self.form
Example #7
def test_trig_invariance(angle: float, n: int):
    """Test that cos(θ), sin(θ) ≃ cos(θ + n*360°), sin(θ + n*360°)"""
    r_cos, r_sin = Vector._trig(angle)
    n_cos, n_sin = Vector._trig(angle + 360 * n)

    note(f"δcos: {r_cos - n_cos}")
    assert isclose(r_cos, n_cos, rel_to=[n / 1e9])
    note(f"δsin: {r_sin - n_sin}")
    assert isclose(r_sin, n_sin, rel_to=[n / 1e9])
Example #8
def test_remarkable_angles(angle, trig):
    """Test that our table of remarkable angles agrees with Vector._trig.

    This is useful both as a consistency test of the table,
    and as a test of Vector._trig (which Vector.rotate uses).
    """
    cos_t, sin_t = trig
    cos_m, sin_m = Vector._trig(angle)

    assert isclose(sin_t, sin_m, abs_tol=0, rel_tol=1e-14)
    assert isclose(cos_t, cos_m, abs_tol=0, rel_tol=1e-14)
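Example #9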
def test_xml_energies(xml_parser):
    """Check energies.

    """

    energy = xml_parser.get_energies('initial')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_parser.get_energies('final')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energies = xml_parser.get_energies('all')
    assert utils.isclose(energies[0], -43.312106219999997)
    assert utils.isclose(energies[1], -43.312106219999997)
Example #10
def test_xml_energies_file_object(xml_parser):
    """Check energies using file object.

    """

    xml_data = xml_parser()
    energy = xml_data.get_energies('initial')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_data.get_energies('final')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_data.get_energies('all')[0]
    assert utils.isclose(energy, -43.312106219999997)
Example #11
def test_xml_energies(xml_parser):
    """Check energies for runs with no ionic steps.

    """
    import numpy as np

    energy = xml_parser.get_energies('initial')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_parser.get_energies('final')[0]
    assert utils.isclose(energy, -43.312106219999997)
    energy = xml_parser.get_energies('all')[0]
    assert utils.isclose(energy, -43.312106219999997)
Example #12
def test_xml_energies_ionic(xml_parser):
    """Check energies for runs with ionic steps.

    """
    import numpy as np
    
    energy = xml_parser.get_energies('initial')[0]
    assert utils.isclose(energy, -42.91113348)
    energy = xml_parser.get_energies('final')[0]
    assert utils.isclose(energy, -43.39087657)
    energy = xml_parser.get_energies('all')
    test_array = np.array([
        -42.91113348, -43.27757545, -43.36648855, -43.37734069, -43.38062479,
        -43.38334165, -43.38753003, -43.38708193, -43.38641449, -43.38701639,
        -43.38699488, -43.38773717, -43.38988315, -43.3898822, -43.39011239,
        -43.39020751, -43.39034244, -43.39044584, -43.39087657
    ])
    assert np.allclose(np.array(energy), test_array)
Example #13
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """[Fig 18.8]
    Return the optimal value of size having minimum error
    on validation set.
    err_train: A training error array, indexed by size
    err_val: A validation error array, indexed by size
    """
    err_val = []
    err_train = []
    size = 1

    while True:
        errT, errV = cross_validation(learner, size, dataset, k)
        # Check for convergence provided err_train is not empty
        if err_train and isclose(err_train[-1], errT, rel_tol=1e-6):
            best_size = 0
            min_val = math.inf

            i = 0
            while i < len(err_val):
                if err_val[i] < min_val:
                    min_val = err_val[i]
                    best_size = i
                i += 1
            # Return the learner trained with the best size found
            return learner(dataset, best_size)
        err_val.append(errV)
        err_train.append(errT)
        print(err_val)
        size += 1
Example #14
    def update_upload_speeds(self, speeds, additive):
        if len(self.uploads) != len(speeds):
            raise Exception("Invalid speeds length, must match self.uploads")

        factor = 1 if additive else 0
        s = 0
        for upload, speed in zip(self.uploads, speeds):
            s += 1
            skip = additive and utils.isclose(0, speed)
            skip |= not additive and utils.isclose(speed, upload.speed)
            if skip:
                continue

            new_speed = (upload.speed * factor) + speed

            upload.interrupt((Connection.SPEED_MODIFIED, new_speed))
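Example #15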
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """[Fig 18.8]
    Return the optimal value of size having minimum error
    on validation set.
    err_train: A training error array, indexed by size
    err_val: A validation error array, indexed by size
    """
    err_val = []
    err_train = []
    size = 1

    while True:
        errT, errV = cross_validation(learner, size, dataset, k)
        # Check for convergence provided err_train is not empty
        if err_train and isclose(err_train[-1], errT, rel_tol=1e-6):
            best_size = 0
            min_val = math.inf

            i = 0
            while i < len(err_val):
                if err_val[i] < min_val:
                    min_val = err_val[i]
                    best_size = i
                i += 1
            # Return the learner trained with the best size found
            return learner(dataset, best_size)
        err_val.append(errV)
        err_train.append(errT)
        print(err_val)
        size += 1
Example #16
def test_text_models():
    flatland = open_data("EN-text/flatland.txt").read()
    wordseq = words(flatland)
    P1 = UnigramWordModel(wordseq)
    P2 = NgramWordModel(2, wordseq)
    P3 = NgramWordModel(3, wordseq)

    # Test top
    assert P1.top(5) == [(2081, 'the'), (1479, 'of'),
                         (1021, 'and'), (1008, 'to'),
                         (850, 'a')]

    assert P2.top(5) == [(368, ('of', 'the')), (152, ('to', 'the')),
                         (152, ('in', 'the')), (86, ('of', 'a')),
                         (80, ('it', 'is'))]

    assert P3.top(5) == [(30, ('a', 'straight', 'line')),
                         (19, ('of', 'three', 'dimensions')),
                         (16, ('the', 'sense', 'of')),
                         (13, ('by', 'the', 'sense')),
                         (13, ('as', 'well', 'as'))]

    # Test isclose
    assert isclose(P1['the'], 0.0611, rel_tol=0.001)
    assert isclose(P2['of', 'the'], 0.0108, rel_tol=0.01)
    assert isclose(P3['so', 'as', 'to'], 0.000323, rel_tol=0.001)

    # Test cond_prob.get
    assert P2.cond_prob.get(('went',)) is None
    assert P3.cond_prob['in', 'order'].dictionary == {'to': 6}

    # Test dictionary
    test_string = 'unigram'
    wordseq = words(test_string)
    P1 = UnigramWordModel(wordseq)
    assert P1.dictionary == {('unigram'): 1}

    test_string = 'bigram text'
    wordseq = words(test_string)
    P2 = NgramWordModel(2, wordseq)
    assert P2.dictionary == {('bigram', 'text'): 1}

    test_string = 'test trigram text here'
    wordseq = words(test_string)
    P3 = NgramWordModel(3, wordseq)
    assert ('test', 'trigram', 'text') in P3.dictionary
    assert ('trigram', 'text', 'here') in P3.dictionary
Example #17
def test_text_models():
    flatland = open_data("EN-text/flatland.txt").read()
    wordseq = words(flatland)
    P1 = UnigramWordModel(wordseq)
    P2 = NgramWordModel(2, wordseq)
    P3 = NgramWordModel(3, wordseq)

    # Test top
    assert P1.top(5) == [(2081, 'the'), (1479, 'of'),
                         (1021, 'and'), (1008, 'to'),
                         (850, 'a')]

    assert P2.top(5) == [(368, ('of', 'the')), (152, ('to', 'the')),
                         (152, ('in', 'the')), (86, ('of', 'a')),
                         (80, ('it', 'is'))]

    assert P3.top(5) == [(30, ('a', 'straight', 'line')),
                         (19, ('of', 'three', 'dimensions')),
                         (16, ('the', 'sense', 'of')),
                         (13, ('by', 'the', 'sense')),
                         (13, ('as', 'well', 'as'))]

    # Test isclose
    assert isclose(P1['the'], 0.0611, rel_tol=0.001)
    assert isclose(P2['of', 'the'], 0.0108, rel_tol=0.01)
    assert isclose(P3['so', 'as', 'to'], 0.000323, rel_tol=0.001)

    # Test cond_prob.get
    assert P2.cond_prob.get(('went',)) is None
    assert P3.cond_prob['in', 'order'].dictionary == {'to': 6}

    # Test dictionary
    test_string = 'unigram'
    wordseq = words(test_string)
    P1 = UnigramWordModel(wordseq)
    assert P1.dictionary == {('unigram'): 1}

    test_string = 'bigram text'
    wordseq = words(test_string)
    P2 = NgramWordModel(2, wordseq)
    assert P2.dictionary == {('bigram', 'text'): 1}

    test_string = 'test trigram text here'
    wordseq = words(test_string)
    P3 = NgramWordModel(3, wordseq)
    assert ('test', 'trigram', 'text') in P3.dictionary
    assert ('trigram', 'text', 'here') in P3.dictionary
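Example #18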
 def validate_result(self, result):
     # edge case
     if str(self.res) == "-0":
         self.res = 0
     result = result.encode('ascii', 'replace').decode('ascii')  # map non-ASCII chars (e.g. a Unicode minus) to '?'
     result = result.replace('?', '-')
     result = result.replace(',', '')
     result = Decimal(result)
     return utils.isclose(float(result), float(self.res))
Example #19
    def normalize(self):
        """Verifique se as probabilidades de todos os valores somam 1.
         Retorna a distribuição normalizada.
         Aumenta um ZeroDivisionError se a soma dos valores for 0."""
        total = sum(self.prob.values())
        if not isclose(total, 1.0):
            for val in self.prob:
                self.prob[val] /= total
        return self
Example #20
 def normalize(self):
     """Make sure the probabilities of all values sum to 1.
     Returns the normalized distribution.
     Raises a ZeroDivisionError if the sum of the values is 0."""
     total = sum(self.prob.values())
     if not isclose(total, 1.0):
         for val in self.prob:
             self.prob[val] /= total
     return self
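Example #21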
 def normalize(self):
     """Make sure the probabilities of all values sum to 1.
     Returns the normalized distribution.
     Raises a ZeroDivisionError if the sum of the values is 0."""
     total = sum(self.prob.values())
     if not isclose(total, 1.0):
         for val in self.prob:
             self.prob[val] /= total
     return self
Example #22
 def ta_form(self, timestamp, price):
     row = self.get_row_data_timestamp(timestamp)
     up, mid, low = row['up'], row['mid'], row['low']
     if isclose(up, low):
         self.form = 'up_low_close'
     elif isclose(up, price):
         self.form = 'up'
     elif isclose(low, price):
         self.form = 'low'
     elif isclose(mid, price):
         self.form = 'mid'
     elif price > up:
         self.form = 'upper'
     elif price < low:
         self.form = 'lower'
     else:
         if price > mid:
             self.form = 'midup'
         elif price < mid:
             self.form = 'midlow'
     return self.form
Example #23
 def ta_form(self, timestamp, price):
     row = self.get_row_data_timestamp(timestamp)
     up, mid, low = row['up'], row['mid'], row['low']
     if isclose(up, low):
         self.form = 'up_low_close'
     elif isclose(up, price):
         self.form = 'up'
     elif isclose(low, price):
         self.form = 'low'
     elif isclose(mid, price):
         self.form = 'mid'
     elif price > up:
         self.form = 'upper'
     elif price < low:
         self.form = 'lower'
     else:
         if price > mid:
             self.form = 'midup'
         elif price < mid:
             self.form = 'midlow'
     return self.form
Example #24
def test_kpoints_write_line(kpoints_parser_line, tmpdir):
    """Test read, write and read KPOINTS in line mode.

    """

    kpoints = kpoints_parser_line.get_dict()
    temp_file = str(tmpdir.join('KPOINTSLINE'))
    kpoints_parser_line.write(file_path=temp_file)
    kpoints_parser_line_temp = Kpoints(file_path=temp_file)
    kpoints_temp = kpoints_parser_line_temp.get_dict()
    assert kpoints_temp['mode'] == 'line'
    assert kpoints_temp['comment'] == 'k-points along high symmetry lines'
    assert kpoints_temp['divisions'] is None
    assert kpoints_temp['shifts'] is None
    assert kpoints_temp['centering'] is None
    assert kpoints_temp['num_kpoints'] == 40
    points = kpoints_temp['points']
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[2][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.75, 0.25]))
    np.testing.assert_allclose(points[4][0], np.array([0.5, 0.75, 0.25]))
    np.testing.assert_allclose(points[5][0], np.array([0.0, 0.0, 0.0]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 1.0)
    assert utils.isclose(points[3][1], 1.0)
    assert utils.isclose(points[4][1], 1.0)
    assert utils.isclose(points[5][1], 1.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    assert points[4][2]
    assert points[5][2]
Example #25
def test_kpoints_params_line(kpoints_parser_line):
    """Check parameters in KPOINTS for line generation.

    """

    kpoints = kpoints_parser_line.get_dict()
    assert kpoints['mode'] == 'line'
    assert kpoints['comment'] == 'k-points along high symmetry lines'
    assert kpoints['divisions'] is None
    assert kpoints['shifts'] is None
    assert kpoints['centering'] is None
    assert kpoints['num_kpoints'] == 40
    points = kpoints['points']
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[2][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.75, 0.25]))
    np.testing.assert_allclose(points[4][0], np.array([0.5, 0.75, 0.25]))
    np.testing.assert_allclose(points[5][0], np.array([0.0, 0.0, 0.0]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 1.0)
    assert utils.isclose(points[3][1], 1.0)
    assert utils.isclose(points[4][1], 1.0)
    assert utils.isclose(points[5][1], 1.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    assert points[4][2]
    assert points[5][2]
Example #26
def test_kpoints_modify_line(kpoints_parser_line, tmpdir):
    """Test read, modify, write and read KPOINTS in line mode.

    """

    kpoints = kpoints_parser_line.get_dict()
    assert kpoints['comment'] == 'k-points along high symmetry lines'
    kpoints_parser_line.modify('comment', 'No comment')
    point = Kpoint(np.array([0.5, 0.5, 0.25]), 1.0)
    kpoints_parser_line.modify('points', point, point_number=3)
    kpoints_parser_line.modify('points', point, point_number=4)
    temp_file = str(tmpdir.join('KPOINTSLINE'))
    kpoints_parser_line.write(file_path=temp_file)
    kpoints_parser_line_temp = Kpoints(file_path=temp_file)
    kpoints_temp = kpoints_parser_line_temp.get_dict()
    assert kpoints_temp['comment'] == 'No comment'
    points = kpoints_temp['points']
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[2][0], np.array([0.5, 0.5, 0.0]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.5, 0.25]))
    np.testing.assert_allclose(points[4][0], np.array([0.5, 0.5, 0.25]))
    np.testing.assert_allclose(points[5][0], np.array([0.0, 0.0, 0.0]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 1.0)
    assert utils.isclose(points[3][1], 1.0)
    assert utils.isclose(points[4][1], 1.0)
    assert utils.isclose(points[5][1], 1.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    assert points[4][2]
    assert points[5][2]
Example #27
def test_text_models():
    flatland = DataFile("EN-text/flatland.txt").read()
    wordseq = words(flatland)
    P1 = UnigramTextModel(wordseq)
    P2 = NgramTextModel(2, wordseq)
    P3 = NgramTextModel(3, wordseq)

    # The most frequent entries in each model
    assert P1.top(10) == [(2081, 'the'), (1479, 'of'), (1021, 'and'),
                          (1008, 'to'), (850, 'a'), (722, 'i'), (640, 'in'),
                          (478, 'that'), (399, 'is'), (348, 'you')]

    assert P2.top(10) == [(368, ('of', 'the')), (152, ('to', 'the')),
                          (152, ('in', 'the')), (86, ('of', 'a')),
                          (80, ('it', 'is')),
                          (71, ('by', 'the')), (68, ('for', 'the')),
                          (68, ('and', 'the')), (62, ('on', 'the')),
                          (60, ('to', 'be'))]

    assert P3.top(10) == [(30, ('a', 'straight', 'line')),
                          (19, ('of', 'three', 'dimensions')),
                          (16, ('the', 'sense', 'of')),
                          (13, ('by', 'the', 'sense')),
                          (13, ('as', 'well', 'as')),
                          (12, ('of', 'the', 'circles')),
                          (12, ('of', 'sight', 'recognition')),
                          (11, ('the', 'number', 'of')),
                          (11, ('that', 'i', 'had')), (11, ('so', 'as', 'to'))]

    assert isclose(P1['the'], 0.0611, rel_tol=0.001)

    assert isclose(P2['of', 'the'], 0.0108, rel_tol=0.01)

    assert isclose(P3['', '', 'but'], 0.0, rel_tol=0.001)
    assert isclose(P3['so', 'as', 'to'], 0.000323, rel_tol=0.001)

    assert P2.cond_prob.get(('went',)) is None

    assert P3.cond_prob['in', 'order'].dictionary == {'to': 6}
Example #28
def test_ngram_models():
    flatland = DataFile("EN-text/flatland.txt").read()
    wordseq = words(flatland)
    P1 = UnigramTextModel(wordseq)
    P2 = NgramTextModel(2, wordseq)
    P3 = NgramTextModel(3, wordseq)

    # The most frequent entries in each model
    assert P1.top(10) == [(2081, 'the'), (1479, 'of'), (1021, 'and'),
                          (1008, 'to'), (850, 'a'), (722, 'i'), (640, 'in'),
                          (478, 'that'), (399, 'is'), (348, 'you')]

    assert P2.top(10) == [(368, ('of', 'the')), (152, ('to', 'the')),
                          (152, ('in', 'the')), (86, ('of', 'a')),
                          (80, ('it', 'is')),
                          (71, ('by', 'the')), (68, ('for', 'the')),
                          (68, ('and', 'the')), (62, ('on', 'the')),
                          (60, ('to', 'be'))]

    assert P3.top(10) == [(30, ('a', 'straight', 'line')),
                          (19, ('of', 'three', 'dimensions')),
                          (16, ('the', 'sense', 'of')),
                          (13, ('by', 'the', 'sense')),
                          (13, ('as', 'well', 'as')),
                          (12, ('of', 'the', 'circles')),
                          (12, ('of', 'sight', 'recognition')),
                          (11, ('the', 'number', 'of')),
                          (11, ('that', 'i', 'had')), (11, ('so', 'as', 'to'))]

    assert isclose(P1['the'], 0.0611, rel_tol=0.001)

    assert isclose(P2['of', 'the'], 0.0108, rel_tol=0.01)

    assert isclose(P3['', '', 'but'], 0.0, rel_tol=0.001)
    assert isclose(P3['so', 'as', 'to'], 0.000323, rel_tol=0.001)

    assert P2.cond_prob.get(('went',)) is None

    assert P3.cond_prob['in', 'order'].dictionary == {'to': 6}
Example #29
def test_dot_rotational_invariance(x: Vector, y: Vector, angle: float):
    """Test that rotating vectors doesn't change their dot product."""
    t = x.angle(y)
    cos_t, _ = Vector._trig(t)
    note(f"θ: {t}")
    note(f"cos θ: {cos_t}")

    # Exclude near-orthogonal test inputs
    assume(abs(cos_t) > 1e-6)
    assert isclose(x * y,
                   x.rotate(angle) * y.rotate(angle),
                   rel_to=(x, y),
                   rel_exp=2)
Example #30
def test_reflect_angle(initial: Vector, normal: Vector):
    """Test angle-related properties of Vector.reflect:

    * initial.reflect(normal) * normal == - initial * normal
    * normal.angle(initial) == 180 - normal.angle(reflected)
    """
    # Exclude initial vectors that are very small or very close to the surface.
    assume(not angle_isclose(initial.angle(normal) % 180, 90, epsilon=10))
    assume(initial.length > 1e-10)

    reflected = initial.reflect(normal)
    assert isclose((initial * normal), -(reflected * normal))
    assert angle_isclose(normal.angle(initial), 180 - normal.angle(reflected))
Example #31
def test_dot_from_angle(x: Vector, y: Vector):
    """Test x · y == |x| · |y| · cos(θ)"""
    t = x.angle(y)
    cos_t, _ = Vector._trig(t)

    # Dismiss near-orthogonal test inputs
    assume(abs(cos_t) > 1e-6)

    min_len, max_len = sorted((x.length, y.length))
    geometric = min_len * (max_len * cos_t)

    note(f"θ: {t}")
    note(f"cos θ: {cos_t}")
    note(f"algebraic: {x * y}")
    note(f"geometric: {geometric}")
    assert isclose(x * y, geometric, rel_to=(x, y), rel_exp=2)
Example #32
def test_kpoints_modify_explicit(kpoints_parser_explicit, tmpdir):
    """Test read, modify, write and read KPOINTS in explicit mode.

    """

    kpoints = kpoints_parser_explicit.get_dict()
    assert kpoints['comment'] == 'Example file'
    points = kpoints['points']
    assert len(points) == 4
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.0, 0.0, 0.5]))
    np.testing.assert_allclose(points[2][0], np.array([0.0, 0.5, 0.5]))
    np.testing.assert_allclose(points[3][0], np.array([0.5, 0.5, 0.5]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 2.0)
    assert utils.isclose(points[3][1], 4.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
    kpoints_parser_explicit.modify('comment', 'Nada comment')
    point = Kpoint(np.array([0.0, 0.0, 0.0]), 1.0)
    kpoints_parser_explicit.modify('points', point, point_number=3)
    temp_file = str(tmpdir.join('KPOINTSEXP'))
    kpoints_parser_explicit.write(file_path=temp_file)
    kpoints_parser_explicit_temp = Kpoints(file_path=temp_file)
    kpoints_temp = kpoints_parser_explicit_temp.get_dict()
    assert kpoints_temp['comment'] == 'Nada comment'
    points = kpoints_temp['points']
    assert len(points) == 4
    np.testing.assert_allclose(points[0][0], np.array([0.0, 0.0, 0.0]))
    np.testing.assert_allclose(points[1][0], np.array([0.0, 0.0, 0.5]))
    np.testing.assert_allclose(points[2][0], np.array([0.0, 0.5, 0.5]))
    np.testing.assert_allclose(points[3][0], np.array([0.0, 0.0, 0.0]))
    assert utils.isclose(points[0][1], 1.0)
    assert utils.isclose(points[1][1], 1.0)
    assert utils.isclose(points[2][1], 2.0)
    assert utils.isclose(points[3][1], 1.0)
    assert points[0][2]
    assert points[1][2]
    assert points[2][2]
    assert points[3][2]
Example #33
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """[Fig 18.8]
    Return the optimal value of size having minimum error
    on validation set.
    err_train: A training error array, indexed by size
    err_val: A validation error array, indexed by size
    Size : [extracted from the book] In this section we explain how to select among models that are parameterized by size.
            For example, with polynomials we have size = 1 for linear functions, size = 2 for quadratics,
            and so on. For decision trees, the size could be the number of nodes in the tree. In all cases
            we want to find the value of the size parameter that best balances underfitting and overfitting
            to give the best test set accuracy.
    """
    err_val = []
    err_train = []
    size = 1
    """
    [deviates from pseudocode]
    Intuition: The size, i.e complexity of a function would never be physically greater than the 
    number of total training data points. For example, if we have 3 data points, a polynomial of degree 
    greater than 3 is more than just overfitting. So, instead of looping till infinity, can just add the 
    length of the data set as a rough upper limit to it. 
    """

    while size < len(dataset):
        errT, errV = cross_validation(learner, size, dataset, k)
        # Check for convergence provided err_train is not empty
        if err_train and isclose(err_train[-1], errT, rel_tol=1e-6):
            best_size = 0
            min_val = math.inf

            i = 0
            while i < len(err_val):
                if err_val[i] < min_val:
                    min_val = err_val[i]
                    best_size = i
                i += 1
            # Return the learner trained with the best size found
            return learner(dataset, best_size)
        err_val.append(errV)
        err_train.append(errT)
        print(err_val)
        size += 1
Example #34
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """
    Fig 18.8
    Return the optimal value of size having minimum error
    on validataion set
    err_train: a training error array, indexed by size
    err_val: a validataion error array, indexed by size
    """
    err_val = []
    err_train = []
    size = 1
    while True:
        errT, errV = cross_validation(learner, size, dataset, k)
        # Check for convergence provided err_val is not empty
        if (err_val and isclose(err_val[-1], errV, rel_tol=1e-6)):
            best_size = size
            return learner(dataset, best_size)

        err_val.append(errV)
        err_train.append(errT)
        print(err_val)
        size += 1
Example #35
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """
    Fig 18.8
    Return the optimal value of size having minimum error
    on validataion set
    err_train: a training error array, indexed by size
    err_val: a validataion error array, indexed by size
    """
    err_val = []
    err_train = []
    size = 1
    while True:
        errT, errV = cross_validation(learner, size, dataset, k)
        # Check for convergence provided err_val is not empty
        if (err_val and isclose(err_val[-1], errV, rel_tol=1e-6)):
            best_size = size
            return learner(dataset, best_size)

        err_val.append(errV)
        err_train.append(errT)
        print(err_val)
        size += 1
Example #36
def cross_validation_wrapper(learner, dataset, k=10, trials=1):
    """
    [Figure 18.8]
    Return the optimal value of size having minimum error on validation set.
    errT: a training error array, indexed by size
    errV: a validation error array, indexed by size
    """
    errs = []
    size = 1
    while True:
        errT, errV = cross_validation(learner, dataset, size, k, trials)
        # check for convergence provided err_val is not empty
        if errT and not isclose(errT[-1], errT, rel_tol=1e-6):
            best_size = 0
            min_val = math.inf
            i = 0
            while i < size:
                if errs[i] < min_val:
                    min_val = errs[i]
                    best_size = i
                i += 1
            return learner(dataset, best_size)
        errs.append(errV)
        size += 1
Example #37
	def bandwidth_check_up(self):
		used_upload = math.fsum(c.speed for c in self.uploads)
		cond = used_upload <= self.up_mbps or utils.isclose(used_upload, self.up_mbps)
		assert cond, "Upload BW exceeded (host ID: %d, used: %f)" % (self.id, used_upload)
Example #38
def test_normalize_length(v):
    """v.normalize().length == 1 and v == v.length * v.normalize()"""
    assume(v)
    assert isclose(v.normalize().length, 1)
    assert v.isclose(v.length * v.normalize())
Example #39
	def avail_download_space(self):
		used_download = math.fsum(c.speed for c in self.downloads)
		free = self.down_mbps - used_download
		return 0 if utils.isclose(0, free) else free
Example #40
	def bandwidth_check_down(self):
		used_download = math.fsum(c.speed for c in self.downloads)
		cond = used_download <= self.down_mbps or utils.isclose(used_download, self.down_mbps)
		assert cond, "Download BW exceeded (host ID: %d, used: %f, original_mbps: %f)" % (self.id, used_download, self.down_mbps)