Example #1
    def testRunAll(self):
        m = mox.Mox()

        m.StubOutWithMock(main, "system")
        m.StubOutWithMock(main, "read_from_file")
        m.StubOutWithMock(main, "get_all")

        m.StubOutWithMock(main, "get_directory")
        main.get_directory("language", "problem", "variation").AndReturn("directory")

        main.get_all().AndReturn([("language", "problem", "variation")])

        main.inputs = [main.ProblemInput(10, 15, 20, 30, 40)]
        main.TIMEOUT = 99
        main.threads = [999]
        main.system(
            (
                ""  # 'timeout 99 ' # TODO timeout on ensisun
                "/usr/bin/time -a -f %e -o "
                "time-language-problem-variation-0-999.out "
                "directory/main < "
                "problem_10_15_20_30_40.in > /dev/null 1>&0 2>&0"
            ),
            timeout=True,
        )
        main.read_from_file("time-language-problem-variation-0-999.out")

        m.ReplayAll()
        main.run_all()
        m.VerifyAll()
        m.UnsetStubs()
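
The test only records expectations, but together they imply the control flow of run_all(): iterate over get_all(), resolve the directory, build a /usr/bin/time command per input and thread count, run it through system(), then read the timing file back. The sketch below is a hypothetical reconstruction; the loop structure and the assumption that ProblemInput is iterable (namedtuple-like) are guesses, not the real main module.

# Hypothetical reconstruction of main.run_all(), inferred from the stubbed
# calls above; get_all, get_directory, system, read_from_file, inputs and
# threads are the module-level names the test stubs or sets.
def run_all():
    for language, problem, variation in get_all():
        directory = get_directory(language, problem, variation)
        for index, problem_input in enumerate(inputs):
            for thread_count in threads:
                out_name = "time-%s-%s-%s-%d-%d.out" % (
                    language, problem, variation, index, thread_count)
                in_name = "problem_%s.in" % "_".join(
                    str(field) for field in problem_input)
                command = (
                    ""  # 'timeout %d ' % TIMEOUT  # TODO timeout on ensisun
                    + "/usr/bin/time -a -f %e -o " + out_name + " "
                    + directory + "/main < "
                    + in_name + " > /dev/null 1>&0 2>&0"
                )
                system(command, timeout=True)
                read_from_file(out_name)
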
Example #2
    def test_big_text_get_and_sort_concordance_term(self):
        """
        Checks if a context sorts right for a given term and can be found properly
        """
        text = read_from_file('lab_1/data.txt')
        tokens = tokenize(text)

        expected = [['although', 'less', 'compact', 'than', 'tex', 'the',
                     'xml', 'structuring', 'promises', 'to', 'make', 'it',
                     'widely', 'usable', 'and', 'allows', 'for', 'instant',
                     'display']]
        actual = sort_concordance(tokens, 'tex', 4, 14, True)
        self.assertEqual(expected, actual)

    def test_get_adjacent_words_several_contexts_big_text(self):
        """
        Checks if adjacent words for a given term can be found in real text properly
        """
        text = read_from_file('lab_1/data.txt')
        tokens = tokenize(text)

        expected = [['epithelial', 'channels'],
                    ['means', 'aluminate'],
                    ['by', 'bicarbonate'],
                    ['the', 'salt']]
        actual = get_adjacent_words(tokens, 'sodium', 1, 1)
        self.assertEqual(expected, actual)
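
The expected pairs above pin down what get_adjacent_words has to return: for each occurrence of the term, the token left_n positions to the left and the token right_n positions to the right. A minimal sketch consistent with that (the body is an assumption; only the name and signature come from the test):

# Assumed body for get_adjacent_words, inferred from the expected pairs above.
def get_adjacent_words(tokens, word, left_n, right_n):
    pairs = []
    for index, token in enumerate(tokens):
        if token == word and index - left_n >= 0 and index + right_n < len(tokens):
            pairs.append([tokens[index - left_n], tokens[index + right_n]])
    return pairs
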
Example #4
    def test_get_concordance_several_contexts_big_text_right(self):
        """
        Checks if contexts for a given term can be found in real text properly
        Taking into consideration right context
        """
        text = read_from_file('lab_1/data.txt')
        tokens = tokenize(text)

        expected = [['means', 'sodium', 'aluminate'],
                    ['by', 'sodium', 'bicarbonate'],
                    ['epithelial', 'sodium', 'channels'],
                    ['the', 'sodium', 'salt']]
        actual = sort_concordance(tokens, 'sodium', 1, 1, False)
        self.assertEqual(expected, actual)

    def test_big_text_get_concordance_term(self):
        """
        Checks if a context for a given term can be found properly
        """
        text = read_from_file('lab_1/data.txt')
        tokens = tokenize(text)

        expected = [['although', 'less', 'compact', 'than', 'tex', 'the',
                     'xml', 'structuring', 'promises', 'to', 'make', 'it',
                     'widely', 'usable', 'and', 'allows', 'for', 'instant',
                     'display', 'in', 'applications', 'such', 'as', 'web',
                     'browsers', 'and', 'facilitates', 'an', 'interpretation',
                     'of', 'its', 'meaning', 'in', 'mathematical', 'software', 'products']]
        actual = get_concordance(tokens, 'tex', 4, 31)
        self.assertEqual(expected, actual)
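
Examples #2 and #4 exercise the lab's file and concordance helpers; the sketches below are minimal versions consistent with the expected values in the tests. The names and signatures come from the tests; the bodies are assumptions, and sort_concordance is deliberately left out.

# Assumed minimal helpers behind the concordance tests.
import re


def read_from_file(path):
    """Read the whole file as one string."""
    with open(path, encoding='utf-8') as opened_file:
        return opened_file.read()


def tokenize(text):
    """Lowercase the text and keep alphabetic words only."""
    return re.findall(r'[a-z]+', text.lower())


def get_concordance(tokens, word, left_context_size, right_context_size):
    """Collect left_context_size tokens before and right_context_size tokens
    after every occurrence of word, keeping the occurrence itself."""
    contexts = []
    for index, token in enumerate(tokens):
        if token == word:
            contexts.append(tokens[max(0, index - left_context_size):
                                   index + right_context_size + 1])
    return contexts
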
Example #6
"""
Concordance implementation starter
"""

import os
import main

if __name__ == '__main__':
    #  use data.txt file to test your program
    current_dir = os.path.dirname(os.path.abspath(__file__))
    data = main.read_from_file(os.path.join(current_dir, 'data.txt'))
    stop_words = main.read_from_file(
        os.path.join(current_dir, 'stop_words.txt')).split('\n')

    #  here goes your logic: calling methods from concordance.py
    tokens = main.tokenize(data)
    print('tokens:', tokens[:10])
    print('\n-----------------------------\n')

    tokens = main.remove_stop_words(tokens,
                                    stop_words)  # old: 34 sec, new: 3.4 sec
    print('tokens without stop words:', tokens[:10])
    print('\n-----------------------------\n')

    frequencies = main.calculate_frequencies(
        tokens)  # old: 116 sec, new: ~81 sec
    print('frequency for the first word:', frequencies[tokens[0]])
    print('\n-----------------------------\n')

    top_10 = main.get_top_n_words(frequencies, 10)
    print('top 10 words:', top_10)
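
The starter drives everything through main; the sketches below show one way the helpers it calls could look. Only the names and signatures come from the calls above; the bodies are assumptions (the set-based stop-word filter is in the spirit of the speed-up noted in the inline comments).

# Assumed bodies for the main helpers the starter calls.
def remove_stop_words(tokens, stop_words):
    """Drop every token that appears in the stop-word list."""
    stop = set(stop_words)  # set membership keeps the filter linear
    return [token for token in tokens if token not in stop]


def calculate_frequencies(tokens):
    """Count how many times each token occurs."""
    frequencies = {}
    for token in tokens:
        frequencies[token] = frequencies.get(token, 0) + 1
    return frequencies


def get_top_n_words(frequencies, top_n):
    """Return the top_n most frequent words, most frequent first."""
    return sorted(frequencies, key=frequencies.get, reverse=True)[:top_n]
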
Example #7
"""
Concordance implementation starter
"""

from main import read_from_file

if __name__ == '__main__':
    #  use data.txt file to test your program
    data = read_from_file('data.txt')
    stop_words = []

    #  here goes your logic: calling methods from concordance.py

    RESULT = None
    # DO NOT REMOVE NEXT LINE - KEEP IT INTENTIONALLY LAST
    assert RESULT == [(), ()], 'Concordance not working'
Example #8
    return rx


def print_results(data, id):
    # wc(s)

    print("--", id, ":", data["given_name"], data["family_name"])
    print("\t" + data["ftechique"])
    print("\t" + data["gender"])
    print("\t" + data["age"], "years old")
    print("\t" + data["height"] + "cm")
    print("\t" + data["country"])
    print("\t" + data["points"], "points")
    print("\t" + data["place"] + "th", "place")
    #print("\t" + data["categories"])
    print()


if __name__ == "__main__":
    d = main.read_from_file()
    ars = sys.argv
    ars.pop(0)
    search_technique(d, make_regex(ars[0]))
    """
    except Exception as e:
        print(str(e))
        print("Please enter arguments:")
        print("[Fav. technique] [Height] [Gender] [Category]")
        exit()
        """
Example #9
import requests
import time
from main import read_from_file, save_to_file, convert_file

my_api_key = read_from_file("api_key.json")
my_api_key = my_api_key['bb_api_key']

# The 7 category links
# Motherboard - abcat0507008
motherboard = "https://api.bestbuy.com/v1/products((categoryPath.id=abcat0507008&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# CPU - abcat0507010
cpu = "https://api.bestbuy.com/v1/products((categoryPath.id=abcat0507010&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# CPU Cooler - pcmcat339900050006
cpucooler = "https://api.bestbuy.com/v1/products((categoryPath.id=pcmcat339900050006&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# Memory (RAM) - abcat0506000
ram = "https://api.bestbuy.com/v1/products((categoryPath.id=abcat0506000&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# Storage (SSD/HDD) - pcmcat270900050001
storage = "https://api.bestbuy.com/v1/products((categoryPath.id=pcmcat270900050001&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# GPU - abcat0507002
gpu = "https://api.bestbuy.com/v1/products((categoryPath.id=abcat0507002&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

# PSU - abcat0507009
psu = "https://api.bestbuy.com/v1/products((categoryPath.id=abcat0507009&customerReviewAverage>=3.5))?&pageSize=100&format=json&show=categoryPath.id,name,regularPrice,salePrice,url,customerReviewAverage,manufacturer,image,longDescription&apiKey="

url_bb = motherboard + my_api_key
#bb_json=requests.get(url_bb).json()
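
Only the motherboard URL is assembled above and the request itself is left commented out; the sketch below is one hedged way to finish the flow with the imported helpers. Only requests.get() and Response.json()/raise_for_status() are standard; the fetch_category helper, the output file names, and the save_to_file signature are assumptions.

# Hedged sketch: everything except the requests calls is assumed.
def fetch_category(category_url, api_key):
    """Download one Best Buy category listing and return the parsed JSON."""
    response = requests.get(category_url + api_key)
    response.raise_for_status()
    return response.json()

# Possible usage, pausing between calls to stay within the API rate limit;
# save_to_file's signature is guessed from its import above.
# for name, url in [("motherboard", motherboard), ("cpu", cpu), ("gpu", gpu)]:
#     save_to_file(fetch_category(url, my_api_key), name + ".json")
#     time.sleep(1)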