Code example #1
    def test_show_rentals(self):
        """test for show_rentals"""

        delete_database()

        import_data(self.folder_name, 'inventory.csv', 'customers.csv',
                    'rental.csv')

        rental_list = show_rentals('p00001')

        gold = [{
            'Customer_ID': 'c00001',
            'Name': 'Danny Holme',
            'Home_Address': '231 Richland Street, Santa Ana, CA, 33133',
            'Phone_Number': '253-111-8988',
            'Email_Address': '*****@*****.**'
        }, {
            'Customer_ID': 'c00007',
            'Name': 'Bettey White',
            'Home_Address': '232 Mohuland Drive, Hollywood, CA, 98546',
            'Phone_Number': '555-444-4444',
            'Email_Address': '*****@*****.**'
        }]

        for r, g in zip(rental_list, gold):
            self.assertDictEqual(r, g)

        self.assertEqual(len(rental_list), 2)
Code example #2
    def test_show_customers(self):
        """test for show_customers"""

        delete_database()

        import_data(self.folder_name, 'inventory.csv', 'customers.csv',
                    'rental.csv')

        customers_dict = show_customers()

        gold = {
            'Customer_ID': 'c00007',
            'Name': 'Bettey White',
            'Home_Address': '232 Mohuland Drive, Hollywood, CA, 98546',
            'Phone_Number': '555-444-4444',
            'Email_Address': '*****@*****.**',
            'Status': 1,
            'Credit_Limit': 100000
        }

        self.assertDictEqual(customers_dict['c00007'], gold)


# if __name__ == "__main__":

#     test = TestDatabase()

#     test.test_import_data()

#     test.test_show_rentals()

#     test.test_show_customers()

#     test.test_show_available_products()
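
The commented-out block above drives each test method by hand. If the module is meant to be run directly, the conventional unittest entry point (a minimal sketch, assuming the standard library unittest module is already imported in this test file) would be:

if __name__ == "__main__":
    # Let unittest discover and run every test_* method of TestDatabase
    # instead of instantiating the class and calling each method manually.
    unittest.main()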
Code example #3
 def test_show_available_products(self):
     """rest for show_available_products"""
     delete_database()
     import_data('csvfiles', 'inventory.csv', 'customers.csv', 'rental.csv')
     product_dict = show_available_products()
     test_show = {
         'description': 'television',
         'product_type': 'electronic',
         'total_quantity': '5',
         'available_quantity': 3
     }
     self.assertEqual(product_dict['p00001'], test_show)
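
For context, here is a minimal sketch of what the show_available_products function exercised above might look like, assuming a MongoDB backend accessed through pymongo; the database and collection names ('media', 'inventory', 'rental') and the rental document shape are assumptions inferred from the test, not code from the original project.

from pymongo import MongoClient


def show_available_products():
    """Return {product_id: details} for every product with stock left.

    Minimal sketch only: connection settings, database/collection names and
    the rental document shape are assumptions, not the project's real code.
    """
    client = MongoClient('localhost', 27017)
    database = client['media']
    products = {}
    for item in database['inventory'].find():
        # Count how many units of this product are currently rented out.
        rented = database['rental'].count_documents(
            {'product_id': item['product_id']})
        available = int(item['total_quantity']) - rented
        if available > 0:
            products[item['product_id']] = {
                'description': item['description'],
                'product_type': item['product_type'],
                'total_quantity': item['total_quantity'],
                'available_quantity': available,
            }
    return products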
Code example #4
    def test_import_data(self):
        """test import all good data"""
        delete_database()

        test_import = import_data(self.folder_name, 'inventory.csv',
                                  'customers.csv', 'rental.csv')

        self.assertEqual(test_import, ((7, 9, 7), (0, 0, 0)))
        """test import bad data"""
        delete_database()
        test_import = import_data(self.folder_name, 'inventory1.csv',
                                  'customers2.csv', 'rental3.csv')

        self.assertEqual(test_import, ((0, 0, 0), (1, 1, 1)))
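
The two tuples asserted here read naturally as (records added, errors encountered) per CSV file, in inventory/customers/rentals order. Below is a minimal sketch of an import_data that satisfies that contract, assuming the CSVs are loaded into MongoDB with pymongo; collection names and the error handling are assumptions, not the original implementation.

import csv
import os

from pymongo import MongoClient


def import_data(folder, inventory_file, customers_file, rentals_file):
    """Return ((added_inventory, added_customers, added_rentals),
               (errors_inventory, errors_customers, errors_rentals))."""
    client = MongoClient('localhost', 27017)
    database = client['media']
    added, errors = [], []
    for filename, collection in ((inventory_file, 'inventory'),
                                 (customers_file, 'customers'),
                                 (rentals_file, 'rental')):
        try:
            with open(os.path.join(folder, filename), newline='') as csv_file:
                rows = list(csv.DictReader(csv_file))
            if rows:  # insert_many rejects an empty list
                database[collection].insert_many(rows)
            added.append(len(rows))
            errors.append(0)
        except FileNotFoundError:
            # A missing file counts as one error and zero added records,
            # matching assertions like ((0, 0, 0), (1, 1, 1)) above.
            added.append(0)
            errors.append(1)
    return tuple(added), tuple(errors)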
Code example #5
    def test_show_available_products(self):

        """rest for show_available_products"""

        delete_database()

        import_data(self.folder_name, 'inventory.csv',
                    'customers.csv', 'rental.csv')

        product_dict = show_available_products()

        gold = {'Product_ID': 'p00007', 'Description': 'ipad',
                'Type': 'electronic', 'Total_Quantity': 15}

        self.assertDictEqual(product_dict['p00007'], gold)
Code example #6
    def test_show_rentals(self):
        """test for show_rentals"""

        delete_database()
        import_data('csvfiles', 'inventory.csv', 'customers.csv', 'rental.csv')
        rental_dict = show_rentals('p00001')
        test_rental = {
            'customer_id': 'c00001',
            'customer_name': 'Dorothy Zbornak',
            'customer_address': '6151 Richmond Street, Miami, FL, 33133',
            'phone_number': '555-111-1111',
            'email': '*****@*****.**',
            'quantity': '1'
        }
        self.assertEqual(rental_dict['r00001'], test_rental)
        self.assertEqual(len(rental_dict), 2)
Code example #7
    def test_import_data(self):
        """test import_data"""

        delete_database()
        test_import = import_data('csvfiles', 'inventory.csv', 'customers.csv',
                                  'rental.csv')
        self.assertEqual(test_import, ((4, 4, 4), (0, 0, 0)))
        delete_database()
        test_import = import_data('csvfiles', 'inventory1.csv',
                                  'customers.csv', 'rental.csv')
        self.assertEqual(test_import, ((0, 4, 4), (1, 0, 0)))
        delete_database()
        test_import = import_data('csvfiles', 'inventory.csv',
                                  'customers1.csv', 'rental.csv')
        self.assertEqual(test_import, ((4, 0, 4), (0, 1, 0)))
        delete_database()
        test_import = import_data('csvfiles', 'inventory.csv', 'customers.csv',
                                  'rental1.csv')
        self.assertEqual(test_import, ((4, 4, 0), (0, 0, 1)))
Code example #8
    def test_fill_database(self):
        """test that database.fill loads both fixture files into the models"""
        classifier_data_path = '{}/test_classifier_data.json'.format(
            os.path.dirname(os.path.abspath(__file__)))
        predictor_data_path = '{}/test_predictor_data.json'.format(
            os.path.dirname(os.path.abspath(__file__)))
        database.fill(classifier_data_path, predictor_data_path)

        with open(classifier_data_path) as f:
            data = json.load(f)

        for expected in data:
            db_object = database.ClassifierInstance.filter(
                id=uuid.UUID(expected['id'])).allow_filtering().first()
            assert db_object.category == expected['category']
            assert db_object.name == expected['name']
            assert db_object.configuration_id == expected['host_aggregate'][
                'configuration_id']
            assert db_object.parameters == expected['parameters']
            assert db_object.host_aggregate.disk == expected['host_aggregate'][
                'disk']
            assert db_object.host_aggregate.ram == expected['host_aggregate'][
                'ram']
            assert db_object.host_aggregate.name == expected['host_aggregate'][
                'name']
            assert db_object.host_aggregate.configuration_id == expected[
                'host_aggregate']['configuration_id']
            assert db_object.host_aggregate.cpu == expected['host_aggregate'][
                'cpu']
            assert db_object.flavor.vcpus == expected['flavor']['vcpus']
            assert db_object.flavor.disk == expected['flavor']['disk']
            assert db_object.flavor.ram == expected['flavor']['ram']
            assert db_object.flavor.name == expected['flavor']['name']
            assert db_object.image == expected['image']
            assert db_object.host == expected['host']
            assert db_object.instance_id == expected['instance_id']
            assert db_object.resource_usage == expected['load_measured']

            db_object = database.HostAggregate.filter(
                name=expected['host_aggregate'][
                    'name']).allow_filtering().first()
            assert db_object.disk == expected['host_aggregate']['disk']
            assert db_object.ram == expected['host_aggregate']['ram']
            assert db_object.name == expected['host_aggregate']['name']
            assert db_object.configuration_id == expected['host_aggregate'][
                'configuration_id']
            assert db_object.cpu == expected['host_aggregate']['cpu']

        with open(predictor_data_path) as f:
            data = json.load(f)

        for expected in data:
            db_object = database.PredictorInstance.filter(
                id=uuid.UUID(expected['id'])).allow_filtering().first()
            assert db_object.instance_id == expected['instance_id']
            assert db_object.image == expected['image']
            assert db_object.category == expected['category']
            assert db_object.requirements == expected['requirements']
            assert db_object.parameters == expected['parameters']

            db_object = database.Image.filter(
                image=expected['image']).allow_filtering().first()
            assert db_object.image == expected['image']
            assert db_object.category == expected['category']

        database.delete_database()
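
To make the attribute chains above easier to follow: the .filter(...).allow_filtering().first() calls are the cassandra-driver object mapper (cqlengine) API, so the models presumably look something like the sketch below. Every column type is an assumption inferred from the assertions, and the remaining models (PredictorInstance, HostAggregate, Image) plus the flavor user type would follow the same pattern.

from cassandra.cqlengine import columns
from cassandra.cqlengine.models import Model
from cassandra.cqlengine.usertype import UserType


class HostAggregateType(UserType):
    # Nested fields read in the test via db_object.host_aggregate.<attr>.
    name = columns.Text()
    configuration_id = columns.Text()
    cpu = columns.Integer()
    ram = columns.Integer()
    disk = columns.Integer()


class ClassifierInstance(Model):
    id = columns.UUID(primary_key=True)
    category = columns.Text()
    name = columns.Text()
    configuration_id = columns.Text()
    parameters = columns.Map(columns.Text, columns.Text)
    host_aggregate = columns.UserDefinedType(HostAggregateType)
    # flavor would be a second UserDefinedType column, omitted for brevity.
    image = columns.Text()
    host = columns.Text()
    instance_id = columns.Text()
    resource_usage = columns.Float()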
Code example #9
File: tests.py  Project: akaptur/phonebook
 def tearDown(self):
     for db in glob.glob('*.db'):
         database.delete_database(db)
Code example #10
File: tests.py  Project: akaptur/phonebook
 def tearDown(self):
     if glob.glob('*.db'):
         for db in glob.glob('*.db'):
             database.delete_database(db)
         raise Exception("New .db files were unintentionally created!")
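
This second tearDown both cleans up and fails loudly when a test leaks .db files into the working directory. An alternative sketch, assuming a plain unittest.TestCase and that the code under test accepts a database path, keeps test databases out of the working directory altogether (class and attribute names here are illustrative, not taken from the phonebook project):

import os
import tempfile
import unittest


class PhonebookTests(unittest.TestCase):

    def setUp(self):
        # Every database file the tests create goes into a throwaway
        # directory that is removed even when a test fails.
        self.tmp_dir = tempfile.TemporaryDirectory()
        self.addCleanup(self.tmp_dir.cleanup)
        self.db_path = os.path.join(self.tmp_dir.name, 'test.db')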
Code example #11
File: filldatabase.py  Project: DaniMunin/ficonDevWeb
import database

database.delete_database()

dishtypes = ["appetizer", "meat", "fish", "dessert"]
fish_names = ["Slashed Sea Bass with Red Onions and Mushrooms", "Slashed tuna with Green Onions and potatoes", "Grilled fish with mashed potatoes", "Slashed swordfish with Red and Green Onions"]
dessert_names = ["Lemon Cupcakes", "Coconut-Lime Cheesecake", "Five-Star Chocolate Cheesecakes", "Tiramisu"]
meat_names = ["Beef Tenderloin", "Pork Tenderloin with Green onion", "Beef steak with potatoes", "Lasagna"]
apetizzer_names = ["Octopus to the party", "Spanish Omelette", "Calamary to the Roman", "Corageous Potatoes"]

dishes_first_letter = ["a", "m", "f", "d"]
dishes_array = [apetizzer_names, meat_names, fish_names, dessert_names]

for j in range(0, 4):
    for i in range(0, 4):
        name = dishes_array[j][i]
        price = 15
        url = "/images/food/dishes/" + dishes_first_letter[j] + str(i) + ".jpg"
        description = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent leo tellus, aliquet quis blandit eget, ultricies quis purus. Aliquam eu fermentum metus. Maecenas accumsan, felis vel rhoncus tempus, erat ligula dignissim sem, id hendrerit urna turpis et nunc. "
        dish_type = dishtypes[j]  # renamed to avoid shadowing the built-in 'type'
        database.new_dish(name, price, url, description, dish_type)

name = "Menu: I'm not fat"
price = 32
description = "Why are you looking at me?, I'm not fat."
dishes = []
dishes.append(database.get_dish_by_name(apetizzer_names[0]))
Code example #12
File: scraper.py  Project: anuj1729/Web-Scraper
subcategory_crawler(category_links)

price_ascending = "price-asc-rank"
price_descending = "price-desc-rank"
average_review = "review-rank"
popularity = "popularity-rank"

print("Number of subcategories:", subcategory_counter)

# This is the crawling depth
depth = 0
BOLD = '\033[1m'
END = '\033[0m'
crawled_categories = set()
database.connect()
database.delete_database()
while queued and depth < 2:
    product_page_url = queued.pop()
    if (product_page_url.get_category() not in crawled_categories
            and product_page_url.get_link() is not None):
        product_link = product_page_url.get_link()
        product_category = product_page_url.get_category()
        # Create the empty json node so that the database node can be created
        database_node = {product_category: {}}
        crawled_categories.add(product_category)
        new_and_popular = get_product_details(product_link, product_category, "new")
        printed_flag = 1
        encoded_url = construct_encoded_url(product_link)
        if encoded_url is not None:
            print("Sorted By:Ratings", "\tURL:", apply_url_filter(encoded_url, average_review))
            ratings = get_product_details(apply_url_filter(encoded_url, average_review), product_category, "ratings")
            print("Sorted By:Price High to Low", "\tURL:", apply_url_filter(encoded_url, price_descending))