def run(self):
    """Run the pipeline step: pull filenames from the Azure queue, classify
    each one, append the results to the output files, and delete the message."""
    client = QueueClient(
        account_url=os.getenv('AZ_QS_AC_URL'),
        queue_name=os.getenv('AZ_QS_QUEUE_NAME'),
        credential=os.getenv('AZ_QS_SAS_TOKEN'))

    pages = client.receive_messages(messages_per_page=5)
    for page in pages.by_page():
        for msg in page:
            filename = msg.content
            label, _ = classifier.predict(filename)
            self.print("'(unknown)' classified as '{label}'",
                       filename=filename, label=label['name'])
            # Append one CSV row per classified file.
            with open(self.output['classifier'], 'a') as out:
                out.write(msg.content + ',' + str(label['id']) + '\n')
            if label['name'] != 'other':
                # Only non-'other' files go through entity extraction.
                entities = ner.predict(filename)
                self.print("'(unknown)' has entities '{entities}'",
                           filename=filename, entities=entities)
                with open(self.output['ner'], 'a') as out:
                    out.write(msg.content + ',"' + str(entities) + '"\n')
            client.delete_message(msg)
def test_sas_update(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: a queue with one message and an update-only SAS token.
    queue_client = self._create_queue()
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        QueuePermissions.UPDATE,
        datetime.utcnow() + timedelta(hours=1),
    )
    messages = queue_client.receive_messages()
    received = next(messages)

    # Act: rewrite the message body through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    sas_client.update_message(
        received.id,
        pop_receipt=received.pop_receipt,
        visibility_timeout=0,
        content=u'updatedmessage1',
    )

    # Assert: the next receive sees the updated content.
    updated = next(messages)
    self.assertEqual(u'updatedmessage1', updated.content)
def test_sas_read(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: a queue with one message and a read-only SAS token valid
    # from five minutes ago until one hour from now.
    queue_client = self._create_queue()
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        QueuePermissions.READ,
        datetime.utcnow() + timedelta(hours=1),
        datetime.utcnow() - timedelta(minutes=5))

    # Act: peek through a client authenticated only with the SAS token.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    peeked = sas_client.peek_messages()

    # Assert: exactly the one enqueued message is visible.
    self.assertIsNotNone(peeked)
    self.assertEqual(1, len(peeked))
    first = peeked[0]
    self.assertIsNotNone(first)
    self.assertNotEqual('', first.id)
    self.assertEqual(u'message1', first.content)
def test_sas_signed_identifier(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: store a read policy on the queue, then mint a SAS token that
    # references the stored policy by id instead of carrying permissions.
    policy = AccessPolicy()
    policy.start = datetime.utcnow() - timedelta(hours=1)
    policy.expiry = datetime.utcnow() + timedelta(hours=1)
    policy.permission = QueuePermissions.READ
    queue_client = self._create_queue()
    resp = queue_client.set_queue_access_policy({'testid': policy})
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        policy_id='testid')

    # Act
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    peeked = sas_client.peek_messages()

    # Assert
    self.assertIsNotNone(peeked)
    self.assertEqual(1, len(peeked))
    first = peeked[0]
    self.assertIsNotNone(first)
    self.assertNotEqual('', first.id)
    self.assertEqual(u'message1', first.content)
def test_sas_update(self, resource_group, location, storage_account, storage_account_key):
    # SAS URL is calculated from storage key, so this test runs live only
    if not self.is_live:
        return

    # Arrange: a fresh queue with one message and an update-only SAS token.
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue_client = self._get_queue_reference(qsc)
    queue_client.create_queue()
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        QueueSasPermissions(update=True),
        datetime.utcnow() + timedelta(hours=1),
    )
    messages = queue_client.receive_messages()
    received = next(messages)

    # Act: rewrite the message body through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    sas_client.update_message(
        received.id,
        pop_receipt=received.pop_receipt,
        visibility_timeout=0,
        content=u'updatedmessage1',
    )

    # Assert: the next receive sees the updated content.
    updated = next(messages)
    self.assertEqual(u'updatedmessage1', updated.content)
def test_create_service_with_custom_account_endpoint_path(self, resource_group, location, storage_account, storage_account_key):
    # A custom endpoint keeps its extra path segments in the hostname.
    custom_account_url = "http://local-machine:11002/custom/account/path/" + self.sas_token
    expected_hostname = 'local-machine:11002/custom/account/path'

    for service_type in SERVICES.items():
        conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};QueueEndpoint={};'.format(
            storage_account.name, storage_account_key, custom_account_url)

        # Act: build the client from a connection string.
        service = service_type[0].from_connection_string(conn_string, queue_name="foo")

        # Assert: account identity comes from the connection string.
        self.assertEqual(service.account_name, storage_account.name)
        self.assertEqual(service.credential.account_name, storage_account.name)
        self.assertEqual(service.credential.account_key, storage_account_key)
        self.assertEqual(service.primary_hostname, expected_hostname)

    # A bare custom URL yields clients with no account name and no credential.
    service = QueueServiceClient(account_url=custom_account_url)
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, expected_hostname)
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/?'))

    service = QueueClient(account_url=custom_account_url, queue_name="foo")
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.queue_name, "foo")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, expected_hostname)
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo?'))

    service = QueueClient.from_queue_url("http://local-machine:11002/custom/account/path/foo" + self.sas_token)
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.queue_name, "foo")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, expected_hostname)
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo?'))
def get_queue(queue_name, create_queue, clear_queue):
    """Return a QueueClient for ``queue_name``, optionally creating or clearing
    the underlying queue.

    Note that constructing a QueueClient does not require the queue to exist;
    it is a handle to a queue which may or may not exist yet.

    :param queue_name: name of the Azure storage queue.
    :param create_queue: if True, create the queue when it does not exist.
    :param clear_queue: if True, clear all messages from an existing queue.
    :returns: the QueueClient, or the string "Queue does not exist" when the
        queue is missing and ``create_queue`` is False.
    """
    # Fetch the storage access key from Key Vault.
    key_vault_name = os.environ["KEY_VAULT_NAME"]
    key_vault_uri = "https://" + key_vault_name + ".vault.azure.net"
    credential = DefaultAzureCredential()
    secret_client = SecretClient(vault_url=key_vault_uri, credential=credential)
    data_access_key = secret_client.get_secret("thecupstore-key")

    account_url = "https://thecupstore.queue.core.windows.net/"
    queueclient = QueueClient(account_url=account_url,
                              queue_name=queue_name,
                              credential=data_access_key.value,
                              message_encode_policy=TextBase64EncodePolicy(),
                              message_decode_policy=TextBase64DecodePolicy())

    # Check that the queue exists and, if not, create it when requested.
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to Exception. The probe failing is treated as "queue missing".
    try:
        queueclient.get_queue_properties()
    except Exception:
        if not create_queue:
            return "Queue does not exist"
        queueclient.create_queue()
    else:
        if clear_queue:
            queueclient.clear_messages()
    return queueclient
def test_message_base64_decode_fails(self, resource_group, location, storage_account, storage_account_key):
    # Arrange: store raw text but configure base64 decoding on read.
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue = QueueClient(
        account_url=self._account_url(storage_account.name),
        queue_name=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=storage_account_key,
        message_encode_policy=None,
        message_decode_policy=BinaryBase64DecodePolicy())
    try:
        queue.create_queue()
    except ResourceExistsError:
        pass
    queue.send_message(u'xyz')

    # Act: decoding the non-base64 payload must raise DecodeError.
    with self.assertRaises(DecodeError) as e:
        queue.peek_messages()

    # Assert: the error message names the base64 failure.
    self.assertNotEqual(
        -1,
        str(e.exception).find('Message content is not valid base 64'))
def test_sas_add(self, resource_group, location, storage_account, storage_account_key):
    # SAS URL is calculated from storage key, so this test runs live only
    if not self.is_live:
        return

    # Arrange: an empty queue and an add-only SAS token.
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue_client = self._get_queue_reference(qsc)
    queue_client.create_queue()
    sas = queue_client.generate_shared_access_signature(
        QueuePermissions.ADD,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: enqueue through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    result = sas_client.enqueue_message(u'addedmessage')

    # Assert: the message is visible via the key-authenticated client.
    received = next(queue_client.receive_messages())
    self.assertEqual(u'addedmessage', received.content)
def test_sas_process(self, resource_group, location, storage_account, storage_account_key):
    # SAS URL is calculated from storage key, so this test runs live only
    if not self.is_live:
        return

    # Arrange: a queue with one message and a process-only SAS token.
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue_client = self._get_queue_reference(qsc)
    queue_client.create_queue()
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        QueueSasPermissions(process=True),
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: receive through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    received = next(sas_client.receive_messages())

    # Assert
    self.assertIsNotNone(received)
    self.assertNotEqual('', received.id)
    self.assertEqual(u'message1', received.content)
def test_create_queue_client_with_complete_queue_url(self, resource_group, location, storage_account, storage_account_key):
    # Arrange: a full queue URL plus an explicit, different queue name.
    url = self.account_url(storage_account, "queue") + "/foo"
    client = QueueClient(url, queue_name='bar', credential=storage_account_key)

    # Assert: the explicit queue_name argument wins over the URL path.
    self.assertEqual(client.scheme, 'https')
    self.assertEqual(client.queue_name, 'bar')
def test_queue_client_api_version_property(self):
    # An explicit api_version must flow through to the transport config.
    client = QueueClient(
        "https://foo.queue.core.windows.net/account",
        "queue_name",
        credential="fake_key",
        api_version=self.api_version_1)
    self.assertEqual(client.api_version, self.api_version_1)
    self.assertEqual(client._client._config.version, self.api_version_1)

    # Omitting api_version falls back to the library default.
    client = QueueClient(
        "https://foo.queue.core.windows.net/account",
        "queue_name",
        credential="fake_key")
    self.assertEqual(client.api_version, self.api_version_2)
    self.assertEqual(client._client._config.version, self.api_version_2)
def test_set_access_policy(self):
    """Exercise setting a queue access policy and authenticating with a
    policy-id SAS token (doubles as the doc snippets between the
    [START]/[END] markers — do not move or rename those markers)."""
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # [START create_queue_client_from_connection_string]
    from azure.storage.queue import QueueClient
    queue_client = QueueClient.from_connection_string(self.connection_string, "queuetest")
    # [END create_queue_client_from_connection_string]

    # Create the queue (idempotent: ignore "already exists")
    try:
        queue_client.create_queue()
    except ResourceExistsError:
        pass
    queue_client.enqueue_message(u"hello world")

    try:
        # [START set_access_policy]
        # Create an access policy valid from one hour ago to one hour ahead
        from azure.storage.queue import AccessPolicy, QueuePermissions
        access_policy = AccessPolicy()
        access_policy.start = datetime.utcnow() - timedelta(hours=1)
        access_policy.expiry = datetime.utcnow() + timedelta(hours=1)
        access_policy.permission = QueuePermissions.READ
        identifiers = {'my-access-policy-id': access_policy}

        # Set the access policy on the queue
        queue_client.set_queue_access_policy(identifiers)
        # [END set_access_policy]

        # Use the stored access policy to generate a SAS token by id
        # [START queue_client_sas_token]
        sas_token = queue_client.generate_shared_access_signature(
            policy_id='my-access-policy-id'
        )
        # [END queue_client_sas_token]

        # Authenticate a new client with only the sas token
        # [START create_queue_client]
        q = QueueClient(
            queue_url=queue_client.url,
            credential=sas_token
        )
        # [END create_queue_client]

        # Use the newly authenticated client to receive messages
        # NOTE(review): receive_messages returns a paged iterator, so this
        # assert only checks the call succeeded, not that a message arrived.
        my_message = q.receive_messages()
        assert my_message is not None
    finally:
        # Delete the queue so reruns start clean
        queue_client.delete_queue()
def test_message_no_encoding(self):
    # Arrange: explicitly disable both encode and decode policies.
    queue = QueueClient(
        account_url="https://account.queue.core.windows.net",
        queue_name="queue",
        credential="account_key",
        message_encode_policy=None,
        message_decode_policy=None)

    # Assert: None is normalized to the no-op policy objects.
    assert isinstance(queue._config.message_encode_policy, NoEncodePolicy)
    assert isinstance(queue._config.message_decode_policy, NoDecodePolicy)
def main(accountName, queueName):
    """Drain a storage queue: print each message's content, then delete it."""
    account_url = "https://%s.queue.core.windows.net" % (accountName)
    credential = ManagedIdentityCredential()
    client = QueueClient(account_url=account_url, queue_name=queueName,
                         credential=credential)
    # One message per page keeps each service round-trip to a single message.
    for message in client.receive_messages(messages_per_page=1):
        print(message.content)
        client.delete_message(message)
def test_message_text_base64(self, storage_account_name, storage_account_key):
    # Arrange: queue configured for base64 text round-tripping.
    qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
    queue = QueueClient(
        account_url=self.account_url(storage_account_name, "queue"),
        queue_name=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=storage_account_key,
        message_encode_policy=TextBase64EncodePolicy(),
        message_decode_policy=TextBase64DecodePolicy())

    # Assert: a non-printable unicode character survives the round trip.
    self._validate_encoding(queue, u'\u0001')
def test_message_bytes_base64(self, resource_group, location, storage_account, storage_account_key):
    # Arrange: queue configured for base64 binary round-tripping.
    qsc = QueueServiceClient(self.account_url(storage_account, "queue"), storage_account_key)
    queue = QueueClient(
        account_url=self.account_url(storage_account, "queue"),
        queue_name=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=storage_account_key,
        message_encode_policy=BinaryBase64EncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())

    # Assert: raw bytes survive the round trip.
    self._validate_encoding(queue, b'xyz')
def test_message_bytes_base64(self):
    # Arrange: queue configured for base64 binary round-tripping.
    queue = QueueClient(
        queue_url=self._get_queue_url(),
        queue=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=self._get_shared_key_credential(),
        message_encode_policy=BinaryBase64EncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())

    # Assert: raw bytes survive the round trip.
    self._validate_encoding(queue, b'xyz')
def test_message_text_fails(self, storage_account_name, storage_account_key):
    # Arrange: a binary encode policy, which rejects text payloads.
    qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
    queue = QueueClient(
        account_url=self.account_url(storage_account_name, "queue"),
        queue_name=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=storage_account_key,
        message_encode_policy=BinaryBase64EncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())

    # Act: sending unicode text must raise a TypeError.
    with self.assertRaises(TypeError) as e:
        queue.send_message(u'xyz')

    # Assert
    self.assertTrue(str(e.exception).startswith('Message content must be bytes'))
def test_message_text_fails(self):
    # Arrange: a binary encode policy, which rejects text payloads.
    queue = QueueClient(
        queue_url=self._get_queue_url(),
        queue=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=self._get_shared_key_credential(),
        message_encode_policy=BinaryBase64EncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())

    # Act: enqueueing unicode text must raise a TypeError.
    with self.assertRaises(TypeError) as e:
        queue.enqueue_message(u'xyz')

    # Assert
    self.assertTrue(str(e.exception).startswith('Message content must be bytes'))
def upload(globpath, container, queue, sas_token_env, storage_account_uri):
    """Upload every file matching ``globpath`` to blob storage and, when
    ``queue`` is given, post each resulting blob URL to that storage queue.

    :param globpath: recursive glob pattern selecting the files to upload.
    :param container: destination blob container name.
    :param queue: storage queue name to notify, or None to skip notification.
    :param sas_token_env: name of the env var holding the SAS token.
    :param storage_account_uri: format string with one '{}' slot for the
        service kind ('blob' or 'queue').
    :returns: 0 on success, 1 on any failure (errors are logged, not raised).
    """
    try:
        # (removed a no-op self-assignment of sas_token_env)
        sas_token = os.getenv(sas_token_env)
        if sas_token is None:
            getLogger().error(
                "Sas token environment variable {} was not defined.".format(
                    sas_token_env))
            return 1

        # The queue client does not depend on the file, so build it once
        # instead of once per uploaded file.
        queue_client = None
        if queue is not None:
            queue_client = QueueClient(
                account_url=storage_account_uri.format('queue'),
                queue_name=queue,
                credential=sas_token,
                message_encode_policy=TextBase64EncodePolicy())

        for infile in glob(globpath, recursive=True):
            blob_name = get_unique_name(infile, os.getenv('HELIX_WORKITEM_ID'))
            getLogger().info("uploading {}".format(infile))
            blob_client = BlobClient(
                account_url=storage_account_uri.format('blob'),
                container_name=container,
                blob_name=blob_name,
                credential=sas_token)
            with open(infile, "rb") as data:
                blob_client.upload_blob(
                    data,
                    blob_type="BlockBlob",
                    content_settings=ContentSettings(
                        content_type="application/json"))
            if queue_client is not None:
                queue_client.send_message(blob_client.url)

        getLogger().info("upload complete")
        return 0
    except Exception as ex:
        # Top-level boundary: log and report failure via the return code.
        getLogger().error('{0}: {1}'.format(type(ex), str(ex)))
        getLogger().error(format_exc())
        return 1
def test_sas_add(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: an empty queue and an add-only SAS token.
    queue_client = self._create_queue()
    sas = queue_client.generate_shared_access_signature(
        QueuePermissions.ADD,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: enqueue through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    result = sas_client.enqueue_message(u'addedmessage')

    # Assert: the message is visible via the key-authenticated client.
    received = next(queue_client.receive_messages())
    self.assertEqual(u'addedmessage', received.content)
def test_sas_signed_identifier(self, resource_group, location, storage_account, storage_account_key):
    # SAS URL is calculated from storage key, so this test runs live only
    if not self.is_live:
        return

    # Arrange: store a read policy on the queue, then mint a SAS token that
    # references the stored policy by id.
    policy = AccessPolicy()
    policy.start = datetime.utcnow() - timedelta(hours=1)
    policy.expiry = datetime.utcnow() + timedelta(hours=1)
    policy.permission = QueueSasPermissions(read=True)
    qsc = QueueServiceClient(self._account_url(storage_account.name), storage_account_key)
    queue_client = self._get_queue_reference(qsc)
    queue_client.create_queue()
    resp = queue_client.set_queue_access_policy({'testid': policy})
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        policy_id='testid')

    # Act
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    peeked = sas_client.peek_messages()

    # Assert
    self.assertIsNotNone(peeked)
    self.assertEqual(1, len(peeked))
    first = peeked[0]
    self.assertIsNotNone(first)
    self.assertNotEqual('', first.id)
    self.assertEqual(u'message1', first.content)
def test_message_base64_decode_fails(self):
    # Arrange: XML text on the wire, but a binary base64 decode policy.
    queue = QueueClient(
        queue_url=self._get_queue_url(),
        queue=self.get_resource_name(TEST_QUEUE_PREFIX),
        credential=self._get_shared_key_credential(),
        message_encode_policy=TextXMLEncodePolicy(),
        message_decode_policy=BinaryBase64DecodePolicy())
    try:
        queue.create_queue()
    except ResourceExistsError:
        pass
    queue.enqueue_message(u'xyz')

    # Act: decoding the non-base64 payload must raise DecodeError.
    with self.assertRaises(DecodeError) as e:
        queue.peek_messages()

    # Assert: the error message names the base64 failure.
    self.assertNotEqual(-1, str(e.exception).find('Message content is not valid base 64'))
def start_round():
    """Create the current round from the posted fixtures and set up the
    goal-update queue. Stores results in module globals."""
    # Requires request header content-type of "application/json".
    matches = flask.request.get_json()

    # Fetch the storage access key from Key Vault.
    vault_name = os.environ["KEY_VAULT_NAME"]
    vault_uri = "https://" + vault_name + ".vault.azure.net"
    credential = DefaultAzureCredential()
    secret_client = SecretClient(vault_url=vault_uri, credential=credential)
    data_access_key = secret_client.get_secret("thecupstore-key")
    table_service = TableService(account_name='thecupstore',
                                 account_key=data_access_key.value)

    # Build the filter. Expects a list of matches, each a list of 2 teams.
    query_string = " or ".join(
        "Name eq '" + team + "'" for match in matches for team in match)
    team_stats = table_service.query_entities('Teams', filter=query_string)

    global current_round
    current_round = classes.round.Round(matches, team_stats)

    # Create the message queue used for sending goal updates.
    queue_name = "goalqueue"
    account_url = "https://thecupstore.queue.core.windows.net/"
    queueservice = QueueServiceClient(account_url=account_url, credential=credential)
    queueservice.create_queue(name=queue_name)
    global queueclient
    queueclient = QueueClient(account_url=account_url, queue_name=queue_name,
                              credential=data_access_key.value)
    return '', 200
def test_sas_process(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: a queue with one message and a process-only SAS token.
    queue_client = self._create_queue()
    queue_client.enqueue_message(u'message1')
    sas = queue_client.generate_shared_access_signature(
        QueuePermissions.PROCESS,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: receive through a SAS-authenticated client.
    sas_client = QueueClient(
        queue_url=queue_client.url,
        credential=sas,
    )
    received = next(sas_client.receive_messages())

    # Assert
    self.assertIsNotNone(received)
    self.assertNotEqual('', received.id)
    self.assertEqual(u'message1', received.content)
COSMOS_HOST = os.environ['COSMOS_ACCOUNT_URI'] MASTER_KEY = os.environ['COSMOS_ACCOUNT_KEY'] DATABASE_ID = os.environ['COSMOS_DB_ID'] COLLECTION_ID = os.environ['COSMOS_BUILDINGS_COLL'] QUEUE_STORAGE_ACCOUNT = os.environ['QUEUE_STORAGE_ACCOUNT'] QUEUE_STORAGE_KEY = os.environ['QUEUE_STORAGE_KEY'] QUEUE_NAME = os.environ['QUEUE_NAME'] db_link = f"dbs/{DATABASE_ID}" buildings_container_link = f"{db_link}/colls/buildings" alerts_container_link = f"{db_link}/colls/alerts" recommendations_container_link = f"{db_link}/colls/recommendations" cosmos = cmos.CosmosClient(COSMOS_HOST, {'masterKey': MASTER_KEY}) queue = QueueClient(account_url=QUEUE_STORAGE_ACCOUNT, queue_name=QUEUE_NAME, credential=QUEUE_STORAGE_KEY) def transform_location_info(json_object): lat = float(json_object['Lat (B#)']) lon = float(json_object['Long (B#)']) json_object['location'] = { 'type': 'Point', 'coordinates': [lon, lat] } return json_object def cosmos_db_import_data(building_file_path, recommendations_file_path): #connect to db. create it if needed logging.info("Connecting to database...") try: database = cosmos.CreateDatabase({"id": DATABASE_ID})
def _get_service(self, queue_name): return self.clients.get( queue_name, QueueClient(account_url=self.account_url, queue_name=queue_name, credential=self.credential))
def main(trigger: func.QueueMessage) -> None:
    '''
    Play one stage of a round (normal time, extra time, or penalties) and
    stream goal and timer events to the "goalqueue" storage queue.

    The function has to use imported code libraries to write to the queue
    because otherwise writes are only done when the function has finished.

    The queue message is a comma-separated string: the first element is the
    game stage ("normal", "extra", "penalties" or "suddendeath"), the rest
    are team names in fixture-list order.
    '''
    logging.info('matchengine triggered')
    # Decode to utf-8 and remove the leading b'' bytes wrapper.
    message = trigger.get_body().decode()
    # The message coming in has to be just text for base 64 decoding, so
    # expect a string of team names in fixture list order.
    team_list = message.split(",")
    # Remove the first element as this tells us whether we're playing normal
    # or extra time or penalties.
    game_stage = team_list.pop(0)

    # Build the Azure Table filter: "Name eq 'A' or Name eq 'B' or ..."
    query_string = ""
    for team in team_list:
        query_string += "Name eq \'" + team + "\' or "
    # Remove trailing ' or '
    query_string = query_string[:-4]

    # Get the team stats from the table (storage key comes from Key Vault).
    keyVaultName = os.environ["KEY_VAULT_NAME"]
    keyVault_URI = "https://" + keyVaultName + ".vault.azure.net"
    credential = DefaultAzureCredential()
    client = SecretClient(vault_url=keyVault_URI, credential=credential)
    data_access_key = client.get_secret("thecupstore-key")
    table_service = TableService(account_name='thecupstore', account_key=data_access_key.value)
    team_stats = table_service.query_entities('Teams', filter=query_string)

    # Set up the queue to write goals and timer intervals to.
    account_url = "https://thecupstore.queue.core.windows.net/"
    queue_name = "goalqueue"
    goal_queue = QueueClient(account_url=account_url, queue_name=queue_name,
                             credential=data_access_key.value,
                             message_encode_policy=TextBase64EncodePolicy())

    # Get in fixture list format and create the current round ready to play.
    fixtures = create_fixtures(team_list)
    current_round = Round(fixtures, team_stats)
    matches = current_round.get_matches()

    # Normal time runs minutes 1-90; extra time resumes at 91 up to 120.
    if game_stage == "normal":
        MATCH_LENGTH = 90
        match_time = 1
    elif game_stage == "extra":
        MATCH_LENGTH = 120
        match_time = 91
    else:
        match_time = 120

    if game_stage == "normal" or game_stage == "extra":
        # Simulate minute by minute across all matches in the round.
        while match_time <= MATCH_LENGTH:
            for match in matches:
                for team in match:
                    if goal_chance(team["goal_chance"]):
                        # goal chance created. Check if saved.
                        if goal_saved(team["keeping"]):
                            pass
                        else:
                            # goal scored: publish the scoring team's name
                            goal_queue.send_message(team["name"])
            logging.info('writing timer to queue ' + str(match_time))
            goal_queue.send_message(str(match_time))
            # Check if the goalqueue is clear before continuing. This is to
            # keep the matchengine in sync with the user form. This way they
            # should see a smooth progression of the timer. Without this
            # check matchengine tends to run fast and multiple second jumps
            # are observed.
            while goal_queue.get_queue_properties().approximate_message_count > 0:
                time.sleep(0.05)
            match_time += 1
    elif game_stage == "penalties":
        # each team has 5 penalty kicks
        for penalty_number in range(5):
            for match in matches:
                for team in match:
                    if penalty_goal(75):
                        goal_queue.send_message(team["name"])
        # add a message to inform game that penalties have completed
        goal_queue.send_message("done")
    elif game_stage == "suddendeath":
        # sudden death penalties: one kick per team this invocation
        for match in matches:
            for team in match:
                if penalty_goal(75):
                    goal_queue.send_message(team["name"])
        # add a message to inform game that a round of sudden death penalties
        # have completed
        goal_queue.send_message("done")
    logging.info('matchengine complete')
# Interactive demo step: create a storage queue and prepare to enqueue messages.
print('\nLet\'s create an Azure Storage Queue to drop some messages on.')
# NOTE(review): raw_input is Python 2 only; under Python 3 this raises NameError.
raw_input('Press Enter to continue...')

# Each storage account has a primary and secondary access key.
# These keys are used by applications to access data in your storage account, such as Queues.
# Obtain the primary storage access key for use with the rest of the demo
response = azurerm.get_storage_account_keys(auth_token, subscription_id, resourcegroup_name, storageaccount_name)
storageaccount_keys = json.loads(response.text)
storageaccount_primarykey = storageaccount_keys['keys'][0]['value']

# Create the Queue with the Azure Storage SDK and the access key obtained in the previous step
queue_client = QueueClient(account_url=storageaccount_url, queue_name='pizzaqueue', credential=storageaccount_primarykey)
response = queue_client.create_queue()
# NOTE(review): newer SDK versions return None from create_queue, which would
# take the error branch below — confirm against the pinned SDK version.
if response == True:
    print('Storage Queue: pizzaqueue created successfully.\n')
else:
    print('Error creating Storage Queue.\n')

###
# Use the Azure Storage Storage SDK for Python to drop some messages in our Queue
###
print(
    'Now let\'s drop some messages in our Queue.\nThese messages could indicate a take-out order being received for a customer ordering pizza.'
)
raw_input('Press Enter to continue...')