def test_sas_update(self):
    """Verify an entity can be updated via a SAS token that grants
    only UPDATE permission on the table."""
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: insert a throwaway entity and mint a one-hour UPDATE SAS
    inserted = self._insert_random_entity()
    sas = self.ts.generate_table_shared_access_signature(
        self.table_name,
        TablePermissions.UPDATE,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: build a client authenticated solely by the SAS and update
    sas_service = TableService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=sas,
    )
    self._set_test_proxy(sas_service, self.settings)
    update_dict = self._create_updated_entity_dict(
        inserted.PartitionKey, inserted.RowKey)
    resp = sas_service.update_entity(self.table_name, update_dict)

    # Assert: read back through the key-authenticated client
    round_trip = self.ts.get_entity(
        self.table_name, inserted.PartitionKey, inserted.RowKey)
    self._assert_updated_entity(round_trip)
def _unmerge_resource(table_client: azuretable.TableService, entity: dict) -> None:
    """Remove this node from an image entity's merged VM list.

    Retries on etag mismatch (HTTP 412) until the optimistic-concurrency
    update succeeds.

    :param azuretable.TableService table_client: table client
    :param dict entity: image entity identifying the row to unmerge from
    """
    while True:
        # always start from the freshest copy of the entity
        entity = table_client.get_entity(
            _STORAGE_CONTAINERS['table_images'],
            entity['PartitionKey'], entity['RowKey'])
        # flatten every VmList<i> property into one id list
        vm_ids = []
        for idx in range(_MAX_VMLIST_PROPERTIES):
            key = 'VmList{}'.format(idx)
            if key in entity:
                vm_ids.extend(entity[key].split(','))
        if _NODEID in vm_ids:
            vm_ids.remove(_NODEID)
        # redistribute the remaining ids back across the VmList<i> slots;
        # slots past the end of the list are cleared with None
        for idx in range(_MAX_VMLIST_PROPERTIES):
            key = 'VmList{}'.format(idx)
            lo = idx * _MAX_VMLIST_IDS_PER_PROPERTY
            hi = min(lo + _MAX_VMLIST_IDS_PER_PROPERTY, len(vm_ids))
            entity[key] = ','.join(vm_ids[lo:hi]) if lo < hi else None
        etag = entity.pop('etag')
        try:
            table_client.update_entity(
                _STORAGE_CONTAINERS['table_images'],
                entity=entity, if_match=etag)
            return
        except azure.common.AzureHttpError as ex:
            # 412: another writer won the race — loop and retry
            if ex.status_code != 412:
                raise
# Build the storage and batch service clients for this run.
table_service = TableService(account_name=storage_account, account_key=storage_key)
blob_service = BlockBlobService(account_name=storage_account, account_key=storage_key)
credentials = batchauth.SharedKeyCredentials(batch_account, batch_key)
batch_client = batch.BatchServiceClient(credentials, base_url=batch_url)

# Fetch the tracking entity, then block until the batch job's tasks finish.
entity = table_service.get_entity(table_name, entity_pk, entity_rk)
wait_for_tasks_to_complete(table_service, batch_client, table_name, entity, job_id)

if table_name == 'DatabaseEntity':
    container_name = sys.argv[10]
    # Tally the output blobs for this database: count, total byte size,
    # and the database type inferred from the blob file extensions.
    files = 0
    total_size = 0
    db_type = 'Nucleotide'
    generator = blob_service.list_blobs(container_name, prefix=entity_rk + '.')
    for blob in generator:
        files += 1
        total_size += blob.properties.content_length
        extension = blob.name.split(".")[-1]
        # extensions starting with 'p' mark a protein database
        # (presumably BLAST-style naming — confirm against producer)
        if extension.startswith('p'):
            db_type = 'Protein'
    # Re-fetch the entity (wait may have taken a while and another writer
    # may have touched it) before writing the aggregated stats back.
    entity = table_service.get_entity(table_name, entity_pk, entity_rk)
    entity.FileCount = files
    entity.TotalSize = total_size
    entity._Type = db_type
    table_service.update_entity(table_name, entity)
import sys
from azure.cosmosdb.table import TableService

if __name__ == '__main__':
    # Usage: <script> storage_account storage_key partition_key row_key state [error]
    storage_account = sys.argv[1]
    storage_key = sys.argv[2]
    entity_pk = sys.argv[3]
    entity_rk = sys.argv[4]
    state = sys.argv[5]
    error = None
    if len(sys.argv) == 7:
        error = sys.argv[6]

    table_service = TableService(account_name=storage_account,
                                 account_key=storage_key)
    entity = table_service.get_entity('SearchEntity', entity_pk, entity_rk)
    try:
        entity._State = state
        if error:
            entity.Errors = error
        # if_match=entity.etag: optimistic concurrency — the update fails
        # if the entity changed since we read it.
        table_service.update_entity('SearchEntity', entity,
                                    if_match=entity.etag)
    except Exception as e:
        # Best-effort status write: report the failure without crashing.
        # Fix: error message previously read "entityt" (typo).
        print('Error updating entity {}'.format(e))