Example #1
class TableBase(object):
    """docstring for TableBase"""
    def __init__(self):
        super(TableBase, self).__init__()
        self.table_service = TableService(
            account_name='boburstorage',
            account_key=
            'wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='
        )

    def add_empty_row(self, table, partition_key, row_key, status):
        print('adding empty row to table...\n')
        row = {
            'PartitionKey': partition_key,
            'RowKey': row_key,
            'result': '',
            'status': status
        }
        self.table_service.insert_or_replace_entity(table, row)

    def update_row_with_result(self, table, partition_key, row_key, sum,
                               status):
        print('updating table row with result...\n')
        xml = '<?xml version="1.0"?><sum>' + str(sum) + '</sum>'
        row = {
            'PartitionKey': partition_key,
            'RowKey': row_key,
            'result': xml,
            'status': status
        }
        self.table_service.update_entity(table, row)
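A minimal driver for the class above might look like this; the table name, keys and status values are illustrative assumptions, not part of the original source:

base = TableBase()
# hypothetical table, partition/row keys and statuses, for illustration only
base.add_empty_row('results', 'G1', '2019-06-13', 'pending')
base.update_row_with_result('results', 'G1', '2019-06-13', 42, 'done')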
Example #2
        def update(self, area, selector, manifest, hash):
            assert area is not None, 'area is none; should already be validated'

            area_config = config.load_area(area)

            tracking_config = config.load_tracking(area_config['tracking'])

            table_service = TableService(account_name=tracking_config['name'], account_key=tracking_config['key1'])
            
            area = area.lower()

            entity = {
                'PartitionKey': area,
                'RowKey': selector.replace('/','_'),
                'selector': selector,
                'hash': hash
            }

            for key, value in manifest.iteritems():
                if key.startswith('_'):
                    continue
                if key in ('PartitionKey', 'RowKey', 'selector', 'hash'):
                    continue

                entity[key] = value

            table_service.insert_or_replace_entity(self._get_table(area), entity)
def set_last_run_id(run_id):
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY)
    databricks_details = {
        'PartitionKey': 'pdm',
        'RowKey': 'pdm',
        'run_id': str(run_id)
    }
    table_service.insert_or_replace_entity('databricks', databricks_details)
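A matching read helper is a natural counterpart; this is a sketch assuming the same 'databricks' table and STORAGE_ACCOUNT_* constants used above:

def get_last_run_id():
    # hypothetical counterpart to set_last_run_id; reads the stored run id back
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY)
    entity = table_service.get_entity('databricks', 'pdm', 'pdm')
    return entity.run_id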
Example #4
class TableBase(object):
    """docstring for TableBase"""
    def __init__(self):
        super(TableBase, self).__init__()
        self.table_service = TableService(
            account_name='bobur',
            account_key=
            '6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw=='
        )

    def create_empty_row(self, status):
        print 'adding empty row to table...\n'
        date = datetime.datetime.now().strftime("%Y-%m-%d")
        generator = 'G1_' + date
        row = {
            'PartitionKey': generator,
            'RowKey': date,
            'result': '',
            'status': status
        }
        self.table_service.insert_or_replace_entity('datatable', row)

    def set_result(self, sum, status):
        print 'updating table row with result...\n'
        date = datetime.datetime.now().strftime("%Y-%m-%d")
        generator = 'G1_' + date

        xml = '<?xml version="1.0"?><sum>' + str(sum) + '</sum>'

        row = {
            'PartitionKey': generator,
            'RowKey': date,
            'result': xml,
            'status': status
        }
        self.table_service.update_entity('datatable', row)

    def create_xml_string(self, _sum):
        root = etree.Element('root')
        # build a single <sum> child carrying the value
        sum_element = etree.Element('sum')
        sum_element.text = _sum
        root.append(sum_element)
        s = etree.tostring(root, pretty_print=True)
        return s
Example #5
class TableBase(object):
	"""docstring for TableBase"""
	
	def __init__(self):
		super(TableBase, self).__init__()
		self.table_service = TableService(account_name='bobur', account_key='6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw==')
		

	def add_empty_row(self, table, partition_key, row_key, status):
		print('adding empty row to table...\n')
		row = { 'PartitionKey': partition_key, 'RowKey': row_key, 'result': '', 'status': status}
		self.table_service.insert_or_replace_entity(table, row)


	def update_row_with_result(self, table, partition_key, row_key, sum, status):
		print('updating table row with result...\n')
		xml = '<?xml version="1.0"?><sum>'+str(sum)+'</sum>'
		row = { 'PartitionKey': partition_key, 'RowKey': row_key, 'result': xml, 'status': status}
		self.table_service.update_entity(table, row)
Example #6
import vk, urllib.request, csv, json, pprint, time
from pprint import pprint
from azure.storage.table import TableService, Entity, TableBatch
table_service = TableService(
    account_name='seva',
    account_key=
    'SgbxLwWkBH4XuGebxECoXfNVG3mVM5YjOs+SWTDUSacc+3YgUmcafYXrXdz5k0HtlZQ3AuEJ1IcFtZYeGVR9Hw=='
)
batch = TableBatch()
#table_service.delete_table('MyVkApp')
#table_service.create_table('MyVkApp')
login = input("Enter username: ")
password = input("Enter password: ")
session = vk.AuthSession(app_id='5889724',
                         user_login=login,
                         user_password=password)
api = vk.API(session)
# Count the current user's friends
friends = api.friends.get(count='')
print('number of friends:')
#pprint(len(friends))
user_info = {
    'PartitionKey': 'user_info',
    'RowKey': '001',
    'description': len(friends)
}
user_info = table_service.insert_or_replace_entity('MyVkApp', user_info)
user_info = table_service.get_entity('MyVkApp', 'user_info', '001')
print(user_info.description)
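TableBatch is imported and instantiated above but never used; if several entities shared the 'user_info' partition they could be grouped into one round trip, as in this sketch (the extra row keys and descriptions are made up for illustration):

batch.insert_or_replace_entity({'PartitionKey': 'user_info', 'RowKey': '002',
                                'description': 'friends snapshot'})
batch.insert_or_replace_entity({'PartitionKey': 'user_info', 'RowKey': '003',
                                'description': 'another snapshot'})
table_service.commit_batch('MyVkApp', batch)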
Example #7
class az(object):
    def __init__(self,
                 default_table_name=DEFAULT_TABLE,
                 partitionKey='default'):
        self.TABLE_STORAGE_KEY = os.getenv('AZURE_STORAGE_KEY')
        self.STORAGE_NAME = os.getenv('STORAGE_NAME')
        self.default_table_name = default_table_name
        self.default_partition = partitionKey
        if self.TABLE_STORAGE_KEY == None:
            from tokens import TABLE_STORAGE_ACCESS_KEY, STORAGE_ACCOUNT_NAME
            self.TABLE_STORAGE_KEY = TABLE_STORAGE_ACCESS_KEY
            self.STORAGE_NAME = STORAGE_ACCOUNT_NAME
        self.table_service = TableService(account_name=self.STORAGE_NAME,
                                          account_key=self.TABLE_STORAGE_KEY)
        #create_table_if_does_not_exists(self.default_table_name)

    def insert_or_replace_entity_to_azure(self,
                                          rowKey,
                                          entry,
                                          t_name=DEFAULT_TABLE):
        '''
        Builds an Entity from the given entry dict and row key and
        uploads it to Azure Table Storage via insert-or-replace.
        '''
        segment = Entity()
        segment.PartitionKey = self.default_partition
        segment.RowKey = str(rowKey).zfill(8)
        segment.latA = str(entry['latA'])
        segment.longA = str(entry['longA'])
        segment.latB = str(entry['latB'])
        segment.longB = str(entry['longB'])
        segment.colorKey = str(entry['color'])

        #print segment.colorKey

        if os.name == 'nt':
            self.table_service.insert_or_replace_entity(
                t_name, self.default_partition,
                str(rowKey).zfill(8), segment)
        else:
            self.table_service.insert_or_replace_entity(t_name, segment)

    def create_table(self, name):
        return self.table_service.create_table(name)

    def delete_table(self, name):
        return self.table_service.delete_table(name)

    def delete_entity_by_rowKey(self, rowKey, table_name=DEFAULT_TABLE):
        return self.table_service.delete_entity(table_name,
                                                self.default_partition, rowKey)

    def does_table_exist(self, table_name):
        if os.name == 'nt':
            for i in self.table_service.query_tables():
                if i.name == table_name:
                    return True
        else:
            for i in self.table_service.list_tables():
                if i.name == table_name:
                    return True
        return False

    def list_tables(self):
        if os.name == 'nt':
            for j in self.table_service.query_tables():
                print j.name
        else:
            for j in self.table_service.list_tables():
                print j.name

    def create_table_if_does_not_exist(self, table_name=DEFAULT_TABLE):
        if self.does_table_exist(table_name):
            return 'already exists'
        else:
            self.table_service.create_table(table_name)

    def create_entry(self, latA, lonA, latB, lonB, bumpiness):
        x = {
            'latA': latA,
            'longA': lonA,
            'latB': latB,
            'longB': lonB,
            'color': bumpiness
        }
        return x

    def create_random_entry(self):
        x = {
            'latA': random.uniform(37, 38),
            'longA': random.uniform(-122, -123),
            'latB': random.uniform(37, 38),
            'longB': random.uniform(-122, -123),
            'color': random.randint(0, 7)
        }
        return x

    def create_and_insert_or_replace_entity_azure(self,
                                                  latA,
                                                  lonA,
                                                  latB,
                                                  lonB,
                                                  bumpiness,
                                                  rowKey,
                                                  table_name=DEFAULT_TABLE):
        return self.insert_or_replace_entity_to_azure(
            rowKey, self.create_entry(latA, lonA, latB, lonB, bumpiness),
            table_name)
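A short, hypothetical driver for the az helper above (it relies on the DEFAULT_TABLE and default partition set in the constructor):

storage = az()
storage.create_table_if_does_not_exist()
# insert one randomly generated road segment under row key 1
storage.insert_or_replace_entity_to_azure(1, storage.create_random_entry())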
def main(msg: func.QueueMessage) -> None:
    logging.info("Processing image analysis queue...")

    input_message = msg.get_body().decode('utf-8')

    logging.info(input_message)

    input_message = json.loads(input_message)

    block_blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                          account_key=ACCOUNT_KEY)

    blob = input_message["blob"]
    meetingCode = input_message["meeting-code"]
    fileName = input_message["file-name"]
    dateTime = input_message["date-time"]

    # Example of input
    # {"blob" : "AT81CB/image_files/AT81CB_9G9C.jpg", "meeting-code" : "AT81CB","file-name":  "AT81CB_9G9C.jpg","date-time": "13/06/2019 10:00"}

    table_service = TableService(account_name=ACCOUNT_NAME,
                                 account_key=ACCOUNT_KEY)

    records = table_service.query_entities(TABLE_NAME_API_FACE,
                                           filter="PartitionKey eq '" +
                                           meetingCode + "' and RowKey eq '" +
                                           fileName + "' and ApiStatus eq 200")

    if len(records.items) == 0:

        logging.info("File not processed yet. Starting processing...")

        sas_minutes = 10

        sas_url = block_blob_service.generate_blob_shared_access_signature(
            CONTAINER_NAME,
            blob,
            BlobPermissions.READ,
            datetime.utcnow() + timedelta(minutes=sas_minutes),
        )

        logging.info("Publicity of file using shared signature created for " +
                     str(sas_minutes))

        image_url = "https://" + ACCOUNT_NAME + ".blob.core.windows.net/" + \
            CONTAINER_NAME + "/" + blob + "?" + sas_url

        logging.info("Public url generated: " + image_url)

        face_api_url = "https://"+AI_API_REGION + \
            ".api.cognitive.microsoft.com/face/v1.0/detect"

        # Example of output
        # perception = {"time": "08:00", "emotion":
        # {"anger": 0.0, "contempt": 0.001, "disgust": 0.0, "fear": 0.0,
        #    "happiness": 0.97, "neutral": 0.029, "sadness": 0.0, "surprise": 0.0}

        headers = {'Ocp-Apim-Subscription-Key': SUBSCRIPTION_KEY}

        params = {
            'returnFaceId': 'false',
            'returnFaceLandmarks': 'false',
            'returnFaceAttributes': 'emotion',
        }

        logging.info("Starting facial API analysis...")
        logging.info("Processing file " + fileName + "...")

        start_time = datetime.now()

        response = requests.post(face_api_url,
                                 params=params,
                                 headers=headers,
                                 json={"url": image_url})

        end_time = datetime.now()
        api_time = end_time - start_time

        logging.info("Face analysis successfully processed.")

        api_response = {
            "statusCode": response.status_code,
            "reason": response.reason
        }

        api_record = {
            "PartitionKey": meetingCode,
            "RowKey": fileName,
            "ApiStatus": response.status_code,
            "ApiResponse": json.dumps(api_response),
            "TextResponse": json.dumps(response.json()),
            "ApiTimeResponseSeconds": api_time.seconds
        }

        table_service.insert_or_replace_entity(TABLE_NAME_API_FACE, api_record)

        logging.info("Response: " + str(api_response))
        logging.info("Response result: " + str(response.json()))

        faces = response.json()
        qtde_person = len(faces)

        logging.info("Records found " + str(qtde_person))

        if response.status_code == 200 and qtde_person > 0:

            positive = 0
            negative = 0
            neutral = 0

            positive_count = 0
            negative_count = 0

            file_processed = []

            for face in faces:
                file_processed.append({
                    "file-name":
                    fileName,
                    "emotion-analysis":
                    face["faceAttributes"]["emotion"]
                })

                max_key = sorted(
                    face["faceAttributes"]["emotion"],
                    key=(lambda key: face["faceAttributes"]["emotion"][key]),
                    reverse=True)

                logging.info("Max key " + str(max_key[0]))

                if max_key[0] in POSITIVE_EMOTIONS:
                    positive_count += 1
                    positive += face["faceAttributes"]["emotion"][max_key[0]]
                elif max_key[0] in NEGATIVE_EMOTIONS:
                    negative_count += 1
                    negative += face["faceAttributes"]["emotion"][max_key[0]]
                else:
                    neutral += face["faceAttributes"]["emotion"][max_key[0]]

            logging.info("Positives count " + str(positive_count))
            logging.info("Negatives count " + str(negative_count))

            logging.info("Positives points " + str(positive))
            logging.info("Negatives points " + str(negative))
            logging.info("Neutral points " + str(neutral))

            total = positive + negative + neutral

            if total > 0:
                positive_norm = round(positive / total, 3)
                negative_norm = round(negative / total, 3)
                neutral_norm = round(neutral / total, 3)
            else:
                positive_norm = 0
                negative_norm = 0
                neutral_norm = 1

            logging.info("Positives points normed " + str(positive_norm))
            logging.info("Negatives points normed " + str(negative_norm))
            logging.info("Neutral points normed " + str(neutral_norm))

            values = {
                "positive": positive_norm,
                "neutral": neutral_norm,
                "negative": negative_norm
            }

            logging.info("Value points: " + str(values))

            values_max_key = sorted(values,
                                    key=(lambda key: values[key]),
                                    reverse=True)

            logging.info("Value max key: " + values_max_key[0])

            if values_max_key[0] == "negative":
                value = -values[values_max_key[0]]
            elif values_max_key[0] == "neutral":
                value = 0
            else:
                value = values[values_max_key[0]]

            logging.info("Value: " + str(value))

            final_report = {
                "time": dateTime,
                "value": value,
                "file-name": fileName,
                "persons": qtde_person,
                "emotion": values
            }

            records = table_service.query_entities(
                TABLE_NAME_TRACKING,
                filter="PartitionKey eq 'tracking-analysis' and RowKey eq '" +
                meetingCode + "'")
            texts_converted = []

            new_record = True

            if len(records.items) > 0:
                record = records.items[0]

                if "EmotionTimeAnalysis" in records.items[
                        0] and "EmotionCount" in records.items[0]:
                    new_record = False

                if "EmotionTimeAnalysis" in records.items[0]:
                    data_points = json.loads(record["EmotionTimeAnalysis"])
                    data_points.append(final_report)

                    file_processeds = json.loads(record["FacialAnalysis"])
                    file_processeds.append(file_processed)

                if "EmotionCount" in records.items[0]:
                    emotional_count = json.loads(record["EmotionCount"])

                    emotional_count["positive"] += positive_count
                    emotional_count["negative"] += negative_count

                    emotional_count_value = emotional_count["positive"] + \
                        emotional_count["negative"]

                    logging.info("Emotional Count: " +
                                 str(emotional_count_value))

                    emotional_count["total"] = emotional_count_value

                    emotional_count["positive_percentage"] = round(
                        100 * emotional_count["positive"] /
                        emotional_count_value, 3)
                    emotional_count["negative_percentage"] = round(
                        100 * emotional_count["negative"] /
                        emotional_count_value, 3)

            if new_record:
                record = {
                    "PartitionKey": "tracking-analysis",
                    "RowKey": meetingCode
                }

                data_points = [final_report]
                file_processeds = [file_processed]

                emotional_count_total = positive_count + negative_count

                if emotional_count_total > 0:
                    emotional_count = {
                        "positive":
                        positive_count,
                        "negative":
                        negative_count,
                        "total":
                        emotional_count_total,
                        "positive_percentage":
                        round(100 * positive_count / emotional_count_total, 3),
                        "negative_percentage":
                        round(100 * negative_count / emotional_count_total, 3)
                    }
                else:
                    emotional_count = {
                        "positive": 0,
                        "negative": 0,
                        "total": 0,
                        "positive_percentage": 0,
                        "negative_percentage": 0
                    }

            logging.info("Data points: " + str(data_points))

            record["FacialAnalysis"] = json.dumps(file_processeds)
            record["EmotionTimeAnalysis"] = json.dumps(data_points)
            record["EmotionCount"] = json.dumps(emotional_count)

            logging.info(record)

            table_service.insert_or_replace_entity(TABLE_NAME_TRACKING, record)
    else:
        logging.info("Item already processed.")
task = {
    'PartitionKey': 'tasksSeattle',
    'RowKey': '1',
    'description': 'Take out the trash',
    'priority': 200
}
table_service.insert_entity('tasktable', task)

task = Entity()
task.PartitionKey = 'tasksSeattle'
task.RowKey = '2'
task.description = 'Wash the car'
task.priority = 100
table_service.insert_entity('tasktable', task)
#
task = {'description': 'Take out the garbage', 'priority': 250}
table_service.update_entity('tasktable', 'tasksSeattle', '1', task)
#
task = {'description': 'Take out the garbage again', 'priority': 250}
table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '1', task)

task = {'description': 'Buy detergent', 'priority': 300}
table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '3', task)

task10 = {
    'PartitionKey': 'tasksSeattle',
    'RowKey': '10',
    'description': 'Go grocery shopping',
    'priority': 400
}
task11 = {
    'PartitionKey': 'tasksSeattle',
    'RowKey': '11',
    'description': 'Clean the bathroom',
    'priority': 100
}
Example #10
from azure.storage.table import TableService, Entity
from config import *

table_service = TableService(account_name=STORAGE_ACCOUNT_NAME,
                             account_key=STORAGE_ACCOUNT_KEY)

table_service.create_table('tasktable')

task = {
    'PartitionKey': 'tasksSeattle',
    'RowKey': '001',
    'description': 'Take out the trash',
    'priority': 200
}
table_service.insert_or_replace_entity('tasktable', task)

task = table_service.get_entity('tasktable', 'tasksSeattle', '001')
print(task.description)
print(task.priority)

table_service.delete_table('tasktable')
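Beyond get_entity, the same table can be queried with a filter; a sketch using the query_entities call seen in other examples on this page (it would have to run before the delete_table call above):

tasks = table_service.query_entities('tasktable',
                                     filter="PartitionKey eq 'tasksSeattle'")
for task in tasks:
    print(task.description, task.priority)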
import vk, urllib.request, csv, json, pprint, time
from pprint import pprint
from azure.storage.table import TableService, Entity, TableBatch
table_service = TableService(account_name='seva', account_key='SgbxLwWkBH4XuGebxECoXfNVG3mVM5YjOs+SWTDUSacc+3YgUmcafYXrXdz5k0HtlZQ3AuEJ1IcFtZYeGVR9Hw==')
batch = TableBatch()
#table_service.delete_table('MyVkApp')
table_service.create_table('MyVkApp')
login = input("Enter username: ")
password = input("Enter password: ")
session = vk.AuthSession(app_id='5889724', user_login=login, user_password=password)
api = vk.API(session)
print('Paste the access token received from VKontakte')
accesstoken = input()
user_data = {'PartitionKey': 'my_user_data', 'RowKey': '001', 'description': accesstoken}
table_service.insert_or_replace_entity('MyVkApp', user_data)
user_data = table_service.get_entity('MyVkApp', 'my_user_data', '001')
print(user_data.description)
Example #12
class StorageTableContext():
    """Initializes the repository with the specified settings dict.
        Required settings in config dict are:
        - AZURE_STORAGE_NAME
        - AZURE_STORAGE_KEY
    """
    
    _models = []
    _encryptproperties = False
    _encrypted_properties = []
    _tableservice = None
    _storage_key = ''
    _storage_name = ''

    def __init__(self, **kwargs):

        self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')

        """ service init """
        self._models = []
        if self._storage_key != '' and self._storage_name != '':
            self._tableservice = TableService(account_name = self._storage_name, account_key = self._storage_key, protocol='https')

        """ encrypt queue service """
        if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):

            # Create the KEK used for encryption.
            # KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
            kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'), kwargs.get('SECRET_KEY', 'super-duper-secret')) # Key identifier

            # Create the key resolver used for decryption.
            # KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)

            # Set the require Encryption, KEK and key resolver on the service object.
            self._encryptproperties = True
            self._tableservice.key_encryption_key = kek
            self._tableservice.key_resolver_function = key_resolver.resolve_key
            self._tableservice.encryption_resolver_function = self.__encryptionresolver__


        pass

    def __createtable__(self, tablename) -> bool:
        if (not self._tableservice is None):
            try:
                self._tableservice.create_table(tablename)
                return True
            except AzureException as e:
                log.error('failed to create {} with error {}'.format(tablename, e))
                return False
        else:
            return True
        pass

    # Define the encryption resolver_function.
    def __encryptionresolver__(self, pk, rk, property_name):
        if property_name in self._encrypted_properties:
            return True
            #log.debug('encrypt field {}'.format(property_name))
        
        #log.debug('dont encrypt field {}'.format(property_name))
        return False

    def register_model(self, storagemodel:object):
        modelname = storagemodel.__class__.__name__     
        if isinstance(storagemodel, StorageTableModel):
            if (not modelname in self._models):
                self.__createtable__(storagemodel._tablename)
                self._models.append(modelname)

                """ set properties to be encrypted client side """
                if self._encryptproperties:
                    self._encrypted_properties += storagemodel._encryptedproperties

                log.info('model {} registered successfully. Models are {!s}. Encrypted fields are {!s} '.format(modelname, self._models, self._encrypted_properties))      
        pass

    def table_isempty(self, tablename, PartitionKey='', RowKey = '') -> bool:
        if  (not self._tableservice is None):

            filter = "PartitionKey eq '{}'".format(PartitionKey) if PartitionKey != '' else ''
            if filter == '':
                filter = "RowKey eq '{}'".format(RowKey) if RowKey != '' else ''
            else:
                filter = filter + ("and RowKey eq '{}'".format(RowKey) if RowKey != '' else '')
            try:
                entities = list(self._tableservice.query_entities(tablename, filter = filter, select='PartitionKey', num_results=1))
                if len(entities) == 1: 
                    return False
                else:
                    return True

            except AzureMissingResourceHttpError as e:
                log.debug('failed to query {} with error {}'.format(tablename, e))
                return True

        else:
            return True
        pass

    def exists(self, storagemodel) -> bool:
        exists = False
        if isinstance(storagemodel, StorageTableModel):
            modelname = storagemodel.__class__.__name__
            if (modelname in self._models):
                if storagemodel._exists is None:
                    try:
                        entity = self._tableservice.get_entity(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey)
                        storagemodel._exists = True
                        exists = True
            
                    except AzureMissingResourceHttpError:
                        storagemodel._exists = False
                else:
                    exists = storagemodel._exists
            else:
                log.debug('please register model {} first'.format(modelname))
                        
        return exists       

    def get(self, storagemodel) -> StorageTableModel:
        """ load entity data from storage to vars in self """

        if isinstance(storagemodel, StorageTableModel):
            modelname = storagemodel.__class__.__name__
            if (modelname in self._models):
                try:
                    entity = self._tableservice.get_entity(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey)
                    storagemodel._exists = True
        
                    """ sync with entity values """
                    for key, default in vars(storagemodel).items():
                        if not key.startswith('_') and key not in ['','PartitionKey','RowKey']:
                            value = getattr(entity, key, None)
                            if not value is None:
                                setattr(storagemodel, key, value)
             
                except AzureMissingResourceHttpError as e:
                    log.debug('can not get table entity:  Table {}, PartitionKey {}, RowKey {} because {!s}'.format(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey, e))
                    storagemodel._exists = False

                except Exception as e:
                    log.debug('can not get table entity:  Table {}, PartitionKey {}, RowKey {} because {!s}'.format(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey, e))
                    storagemodel._exists = False

            else:
                log.debug('please register model {} first to {!s}'.format(modelname, self._models))

            return storagemodel

        else:
            return None

    def insert(self, storagemodel) -> StorageTableModel:
        """ insert model into storage """
        if isinstance(storagemodel, StorageTableModel):
            modelname = storagemodel.__class__.__name__
            if (modelname in self._models):
                try:            
                    self._tableservice.insert_or_replace_entity(storagemodel._tablename, storagemodel.entity())
                    storagemodel._exists = True

                except AzureMissingResourceHttpError as e:
                    log.debug('can not insert or replace table entity:  Table {}, PartitionKey {}, RowKey {} because {!s}'.format(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey, e))
            else:
                log.debug('please register model {} first'.format(modelname))

            return storagemodel
        else:
            return None

    def merge(self, storagemodel) -> StorageTableModel:
        """ try to merge entry """
        if isinstance(storagemodel, StorageTableModel):
            modelname = storagemodel.__class__.__name__
            if (modelname in self._models):
                try:            
                    self._tableservice.insert_or_merge_entity(storagemodel._tablename, storagemodel.entity())
                    storagemodel._exists = True

                except AzureMissingResourceHttpError as e:
                    log.debug('can not insert or merge table entity:  Table {}, PartitionKey {}, RowKey {} because {!s}'.format(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey, e))
            else:
                log.debug('please register model {} first'.format(modelname))

            return storagemodel
        else:
            return None
    
    def delete(self,storagemodel):
        """ delete existing Entity """
        if isinstance(storagemodel, StorageTableModel):
            modelname = storagemodel.__class__.__name__
            if (modelname in self._models):
                try:
                    self._tableservice.delete_entity(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey)
                    storagemodel._exists = False

                except AzureMissingResourceHttpError as e:
                    log.debug('can not delete table entity:  Table {}, PartitionKey {}, RowKey {} because {!s}'.format(storagemodel._tablename, storagemodel.PartitionKey, storagemodel.RowKey, e))

            else:
                log.debug('please register model {} first'.format(modelname))

            return storagemodel
        else:
            return None


    def __changeprimarykeys__(self, PartitionKey = '', RowKey = ''):
        """ Change Entity Primary Keys into new instance:

            - PartitionKey and/or
            - RowKey
        """

        PartitionKey = PartitionKey if PartitionKey != '' else self._PartitionKey
        RowKey = RowKey if RowKey != '' else self._RowKey

        """ change Primary Keys if different to existing ones """
        if (PartitionKey != self._PartitionKey) or (RowKey != self._RowKey):
            return True, PartitionKey, RowKey
        else:
            return False, PartitionKey, RowKey
        pass
            
    def moveto(self, PartitionKey = '', RowKey = ''):
        """ Change Entity Primary Keys and move in Storage:

            - PartitionKey and/or
            - RowKey
        """
        changed, PartitionKey, RowKey = self.__changeprimarykeys__(PartitionKey, RowKey)

        if changed:

            """ sync self """
            new = self.copyto(PartitionKey, RowKey)
            new.save()

            """ delete Entity if exists in Storage """
            self.delete()

    def copyto(self, PartitionKey = '', RowKey = '') -> object:
        """ Change Entity Primary Keys and copy to new Instance:

            - PartitionKey and/or
            - RowKey
        """
        changed, PartitionKey, RowKey = self.__changeprimarykeys__(PartitionKey, RowKey)

        self.load()
        new = self
        new._PartitionKey = PartitionKey
        new._RowKey = RowKey
        new.load()

        return new

    def query(self, storagecollection) -> StorageTableCollection:
        if isinstance(storagecollection, StorageTableCollection):
            try:
                storagecollection.extend(self._tableservice.query_entities(storagecollection._tablename,storagecollection._filter))

            except AzureMissingResourceHttpError as e:
                log.debug('can not query table {} with filters {} because {!s}'.format(storagecollection._tablename, storagecollection._filter, e))            

            return storagecollection
        else:
            return None
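A hedged usage sketch for this repository class; Task stands in for a StorageTableModel subclass (with _tablename, PartitionKey and RowKey set), which is not shown on this page:

context = StorageTableContext(AZURE_STORAGE_NAME='mystorageaccount',
                              AZURE_STORAGE_KEY='<account key>')
task = Task(PartitionKey='tasksSeattle', RowKey='1')  # hypothetical model instance
context.register_model(task)
if not context.exists(task):
    context.insert(task)
else:
    task = context.get(task)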
Example #13
class TableStorageHandler(logging.Handler):
    """
    Handler class which writes log messages to an Azure Storage table.
    """
    MAX_BATCH_SIZE = 100

    def __init__(self, 
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 table='logs',
                 batch_size=0,
                 extra_properties=None,
                 partition_key_formatter=None,
                 row_key_formatter=None,
                 ):
        """
        Initialize the handler.
        """
        logging.Handler.__init__(self)
        self.service = TableService(account_name=account_name,
                                    account_key=account_key,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.table = _formatName(table, self.meta)
        self.ready = False
        self.rowno = 0
        if not partition_key_formatter:
            # default format for partition keys
            fmt = '%(asctime)s'
            datefmt = '%Y%m%d%H%M'
            partition_key_formatter = logging.Formatter(fmt, datefmt)
        self.partition_key_formatter = partition_key_formatter
        if not row_key_formatter:
            # default format for row keys
            fmt = '%(asctime)s%(msecs)03d-%(hostname)s-%(process)d-%(rowno)02d'
            datefmt = '%Y%m%d%H%M%S'
            row_key_formatter = logging.Formatter(fmt, datefmt)
        self.row_key_formatter = row_key_formatter
        # extra properties and formatters for them
        self.extra_properties = extra_properties
        if extra_properties:
            self.extra_property_formatters = {}
            self.extra_property_names = {}
            for extra in extra_properties:
                if _PY3:
                    f = logging.Formatter(fmt=extra, style=extra[0])
                else:
                    f = logging.Formatter(fmt=extra)
                self.extra_property_formatters[extra] = f
                self.extra_property_names[extra] = self._getFormatName(extra)
        # the storage emulator doesn't support batch operations
        if batch_size <= 1 or self.service.use_local_storage:
            self.batch = False
        else:
            self.batch = True
            if batch_size > TableStorageHandler.MAX_BATCH_SIZE:
                self.batch_size = TableStorageHandler.MAX_BATCH_SIZE
            else:
                self.batch_size = batch_size
        if self.batch:
            self.current_partition_key = None

    def _copyLogRecord(self, record):
        copy = logging.makeLogRecord(record.__dict__)
        copy.exc_info = None
        copy.exc_text = None
        if _PY3:
            copy.stack_info = None
        return copy

    def _getFormatName(self, extra):
        name = extra
        style = extra[0]
        if style == '%':
            name = extra[2:extra.index(')')]
        elif _PY3:
            if style == '{':
                name = next(string.Formatter().parse(extra))[1]
            elif style == '$':
                name = extra[1:]
                if name.startswith('{'):
                    name = name[1:-1]
        return name

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified table.
        """
        try:
            if not self.ready:
                self.service.create_table(self.table)
                if self.batch:
                    self.service.begin_batch()
                self.ready = True
            # generate partition key for the entity
            record.hostname = self.meta['hostname']
            copy = self._copyLogRecord(record)
            partition_key = self.partition_key_formatter.format(copy)
            # ensure entities in the batch all have the same partition key
            if self.batch:
                if self.current_partition_key is not None:
                    if partition_key != self.current_partition_key:
                        self.flush()
                self.current_partition_key = partition_key
            # add log message and extra properties to the entity
            entity = {}
            if self.extra_properties:
                for extra in self.extra_properties:
                    formatter = self.extra_property_formatters[extra]
                    name = self.extra_property_names[extra]
                    entity[name] = formatter.format(copy)
            entity['message'] = self.format(record)
            # generate row key for the entity
            copy.rowno = self.rowno
            row_key = self.row_key_formatter.format(copy)
            # add entity to the table
            self.service.insert_or_replace_entity(self.table,
                                                  partition_key,
                                                  row_key,
                                                  entity)
            # commit the ongoing batch if it reaches the high mark
            if self.batch:
                self.rowno += 1
                if self.rowno >= self.batch_size:
                    self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def flush(self):
        """
        Ensure all logging output has been flushed.
        """
        if self.batch and self.rowno > 0:
            try:
                self.service.commit_batch()
            finally:
                self.rowno = 0
                self.service.begin_batch()

    def setFormatter(self, fmt):
        """
        Set the message formatter.
        """
        super(TableStorageHandler, self).setFormatter(fmt)
        if self.extra_properties:
            logging._acquireLock()
            try:
                for extra in self.extra_property_formatters.values():
                    extra.converter = fmt.converter
                    extra.datefmt = fmt.datefmt
                    if _PY3:
                        extra.default_time_format = fmt.default_time_format
                        extra.default_msec_format = fmt.default_msec_format
            finally:
                logging._releaseLock()

    def setPartitionKeyFormatter(self, fmt):
        """
        Set the partition key formatter.
        """
        self.partition_key_formatter = fmt

    def setRowKeyFormatter(self, fmt):
        """
        Set the row key formatter.
        """
        self.row_key_formatter = fmt
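Attaching the handler to the standard logging module might look like this (the account name, key and table name are placeholders):

logger = logging.getLogger('example')
handler = TableStorageHandler(account_name='mystorageaccount',
                              account_key='<account key>',
                              table='logs')
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info('hello from TableStorageHandler')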
Example #14
def main(msg: func.QueueMessage) -> None:

    logging.info("Processing audio analysis queue...")

    stopwords = nltk.corpus.stopwords.words("portuguese")

    input_message = msg.get_body().decode('utf-8')

    logging.info(input_message)

    input_message = json.loads(input_message)

    logging.info("Processing file " + input_message["blob"] + "...")

    table_service = TableService(account_name=ACCOUNT_NAME,
                                 account_key=ACCOUNT_KEY)
    records = table_service.query_entities(
        TABLE_NAME_API_T2S,
        filter="PartitionKey eq 'recording' and RowKey eq '" +
        input_message["meeting-code"] + "' and RecognitionStatus eq 'Success'")

    if len(records.items) == 0:
        blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                        account_key=ACCOUNT_KEY)
        blob_entry = blob_service.get_blob_to_bytes(CONTAINER_NAME,
                                                    input_message["blob"],
                                                    timeout=60)
        audio_bytes = blob_entry.content

        url_token_api = "https://"+AI_API_REGION + \
            ".api.cognitive.microsoft.com/sts/v1.0/issueToken"
        api_key = SPEECH2TEXT_API_KEY

        headers = {"Content-Length": "0", "Ocp-Apim-Subscription-Key": api_key}

        start_time = datetime.now()

        api_response = requests.post(url_token_api, headers=headers)
        access_token = str(api_response.content.decode('utf-8'))

        url_stt_api = "https://"+AI_API_REGION + \
            ".stt.speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1?language=pt-BR"

        headers = {
            "Authorization": "Bearer {0}".format(access_token),
            "Content-Length": str(len(audio_bytes)),
            "Content-type": "audio/wav",
            "codec": "audio/pcm",
            "samplerate": "16000"
        }

        record = {}
        api_response = None
        res_json = None

        try:
            api_response = requests.post(url_stt_api,
                                         headers=headers,
                                         params=None,
                                         data=audio_bytes)

            end_time = datetime.now()
            api_time = end_time - start_time

            logging.info(api_response)

            res_json = json.loads(api_response.content.decode('utf-8'))

            record["RecognitionStatus"] = res_json["RecognitionStatus"]
            record["TextConverted"] = res_json["DisplayText"]
            record["ApiResponse"] = json.dumps(res_json)
            record["ApiTimeResponseSeconds"] = api_time.seconds

            logging.info("Speech to text processed.")

        except Exception as error:
            record["RecognitionStatus"] = "Request Fail"
            record["Exception"] = traceback.format_exc()

            logging.error(error)

        finally:
            record["PartitionKey"] = input_message["meeting-code"]
            record["RowKey"] = input_message["file-name"]
            table_service.insert_or_replace_entity(TABLE_NAME_API_T2S, record)

            logging.info("Result persisted.")

        logging.info("Result:" + str(res_json))

        if res_json is not None and "Message" in res_json:
            raise Exception(res_json["Message"])

        if res_json is not None and res_json["RecognitionStatus"] == "Success":
            logging.info("Decoded speech: " + str(res_json["DisplayText"]))

            records = table_service.query_entities(
                TABLE_NAME_TRACKING,
                filter="PartitionKey eq 'tracking-analysis' and RowKey eq '" +
                input_message["meeting-code"] + "'")
            texts_converted = []

            if len(records.items) > 0:
                record = records.items[0]
                if "TextConverted" in records.items[0]:
                    texts_converted = json.loads(record["TextConverted"])
                    text_converted = {
                        "file-name": input_message["file-name"],
                        "text": res_json["DisplayText"]
                    }

                    if text_converted not in texts_converted:
                        texts_converted.append(text_converted)

                    record["TextConverted"] = json.dumps(texts_converted)
                else:
                    text_converted = {
                        "file-name": input_message["file-name"],
                        "text": res_json["DisplayText"]
                    }
                    texts_converted.append(text_converted)

                    record["TextConverted"] = json.dumps(texts_converted)
            else:
                text_converted = {
                    "file-name": input_message["file-name"],
                    "text": res_json["DisplayText"]
                }
                texts_converted.append(text_converted)
                record = {
                    "PartitionKey": "tracking-analysis",
                    "RowKey": input_message["meeting-code"],
                    "TextConverted": json.dumps(texts_converted)
                }

            text_list = []

            for text_converted in texts_converted:
                text_list.append(text_converted["text"])

            logging.info("Text List: " + str(text_list))

            text_list = set(text_list)
            freq_dist = processar_palavra_chave(text_list)

            record["FreqDist"] = freq_dist

            table_service.insert_or_replace_entity(TABLE_NAME_TRACKING, record)

            logging.info("Message processed successfully:" +
                         str(res_json["DisplayText"]))

        else:
            print("Descartado por falha no reconhecimento de voz.")
            logging.info(
                "Item discarded. Bad quality or audio file corrupted.")
    else:
        logging.info("Item already processed.")
    print('\nFinished :) ')

else:

    print('Azure Table \n')
    data_out = Entity()
    data_out.PartitionKey = userID
    data_out.RowKey = timestamp
    data_out.anger = resultAPI[0]['scores']['anger']
    data_out.contempt = resultAPI[0]['scores']['contempt']
    data_out.disgust = resultAPI[0]['scores']['disgust']
    data_out.fear = resultAPI[0]['scores']['fear']
    data_out.happiness = resultAPI[0]['scores']['happiness']
    data_out.neutral = resultAPI[0]['scores']['neutral']
    data_out.sadness = resultAPI[0]['scores']['sadness']
    data_out.surprise = resultAPI[0]['scores']['surprise']
    data_out.timeSpent = timeSpent
    data_out.click = clicks

    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY)

    table_service.create_table('Results')

    table_service.insert_or_replace_entity('Results', data_out)

    output = table_service.get_entity('Results', userID, timestamp)
    print(output)
    print('\nFinished :) ')
Example #16
table_service.create_table('tasktable')
task = {'PartitionKey': 'tasksSeattle', 'RowKey': '1', 'description' : 'Take out the trash', 'priority' : 200}
table_service.insert_entity('tasktable', task)

task = Entity()
task.PartitionKey = 'tasksSeattle'
task.RowKey = '2'
task.description = 'Wash the car'
task.priority = 100
table_service.insert_entity('tasktable', task)
#
task = {'description' : 'Take out the garbage', 'priority' : 250}
table_service.update_entity('tasktable', 'tasksSeattle', '1', task)
#
task = {'description' : 'Take out the garbage again', 'priority' : 250}
table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '1', task)

task = {'description' : 'Buy detergent', 'priority' : 300}
table_service.insert_or_replace_entity('tasktable', 'tasksSeattle', '3', task)

task10 = {'PartitionKey': 'tasksSeattle', 'RowKey': '10', 'description' : 'Go grocery shopping', 'priority' : 400}
task11 = {'PartitionKey': 'tasksSeattle', 'RowKey': '11', 'description' : 'Clean the bathroom', 'priority' : 100}
table_service.begin_batch()
table_service.insert_entity('tasktable', task10)
table_service.insert_entity('tasktable', task11)
table_service.commit_batch()

task = table_service.get_entity('tasktable', 'tasksSeattle', '1')
print(task.description)
print(task.priority)
Example #17
    tab = t['tab']

table_service = TableService(account_name=acc, account_key=key)

wb = load_workbook(filename=cfname)
ws = wb['Sheet1']

PK = 'US_NIST_800-53-r4'
row = 1
while not (ws['B' + str(row)].value is None
           and ws['F' + str(row)].value is None):
    row = row + 1
    rk = ws['B' + str(row)].value
    dm = ws['A' + str(row)].value
    #    sdm = ws['B'+str(row)].value
    title = ws['C' + str(row)].value
    desc = ws['F' + str(row)].value
    sup = ws['G' + str(row)].value
    rel = ws['H' + str(row)].value
    control = {
        'PartitionKey': PK,
        'RowKey': rk,
        'Domain': dm,
        'SupplementalGuide': sup,
        'Title': title,
        'ControlDescription': desc,
        'Related': rel
    }
    table_service.insert_or_replace_entity(tab, control)
    print row, " ", control
class TableStorageHandler(logging.Handler):
    """
    Handler class which writes log messages to an Azure Storage table.
    """
    MAX_BATCH_SIZE = 100

    def __init__(self, 
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 table='logs',
                 batch_size=0,
                 extra_properties=None,
                 partition_key_formatter=None,
                 row_key_formatter=None,
                 is_emulated=False,
                 ):
        """
        Initialize the handler.
        """
        logging.Handler.__init__(self)
        self.service = TableService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.table = _formatName(table, self.meta)
        self.ready = False
        self.rowno = 0
        if not partition_key_formatter:
            # default format for partition keys
            fmt = '%(asctime)s'
            datefmt = '%Y%m%d%H%M'
            partition_key_formatter = logging.Formatter(fmt, datefmt)
        self.partition_key_formatter = partition_key_formatter
        if not row_key_formatter:
            # default format for row keys
            fmt = '%(asctime)s%(msecs)03d-%(hostname)s-%(process)d-%(rowno)02d'
            datefmt = '%Y%m%d%H%M%S'
            row_key_formatter = logging.Formatter(fmt, datefmt)
        self.row_key_formatter = row_key_formatter
        # extra properties and formatters for them
        self.extra_properties = extra_properties
        if extra_properties:
            self.extra_property_formatters = {}
            self.extra_property_names = {}
            for extra in extra_properties:
                if _PY3:
                    f = logging.Formatter(fmt=extra, style=extra[0])
                else:
                    f = logging.Formatter(fmt=extra)
                self.extra_property_formatters[extra] = f
                self.extra_property_names[extra] = self._getFormatName(extra)
        # the storage emulator doesn't support batch operations
        if batch_size <= 1 or is_emulated:
            self.batch = None
        else:
            self.batch = TableBatch()
            if batch_size > TableStorageHandler.MAX_BATCH_SIZE:
                self.batch_size = TableStorageHandler.MAX_BATCH_SIZE
            else:
                self.batch_size = batch_size
        if self.batch:
            self.current_partition_key = None

    def _copyLogRecord(self, record):
        copy = logging.makeLogRecord(record.__dict__)
        copy.exc_info = None
        copy.exc_text = None
        if _PY3:
            copy.stack_info = None
        return copy

    def _getFormatName(self, extra):
        name = extra
        style = extra[0]
        if style == '%':
            name = extra[2:extra.index(')')]
        elif _PY3:
            if style == '{':
                name = next(string.Formatter().parse(extra))[1]
            elif style == '$':
                name = extra[1:]
                if name.startswith('{'):
                    name = name[1:-1]
        return name

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified table.
        """
        try:
            if not self.ready:
                self.service.create_table(self.table)
                self.ready = True
            # generate partition key for the entity
            record.hostname = self.meta['hostname']
            copy = self._copyLogRecord(record)
            partition_key = self.partition_key_formatter.format(copy)
            # ensure entities in the batch all have the same partition key
            if self.batch:
                if self.current_partition_key is not None:
                    if partition_key != self.current_partition_key:
                        self.flush()
                self.current_partition_key = partition_key
            # add log message and extra properties to the entity
            entity = {}
            if self.extra_properties:
                for extra in self.extra_properties:
                    formatter = self.extra_property_formatters[extra]
                    name = self.extra_property_names[extra]
                    entity[name] = formatter.format(copy)
            entity['message'] = self.format(record)
            # generate row key for the entity
            copy.rowno = self.rowno
            row_key = self.row_key_formatter.format(copy)
            # add entity to the table
            entity['PartitionKey'] = partition_key
            entity['RowKey'] = row_key
            if not self.batch:
                self.service.insert_or_replace_entity(self.table, entity)
            else:
                self.batch.insert_or_replace_entity(entity)
                # commit the ongoing batch if it reaches the high mark
                self.rowno += 1
                if self.rowno >= self.batch_size:
                    self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def flush(self):
        """
        Ensure all logging output has been flushed.
        """
        if self.batch and self.rowno > 0:
            try:
                self.service.commit_batch(self.table, self.batch)
            finally:
                self.rowno = 0
                self.batch = TableBatch()

    def setFormatter(self, fmt):
        """
        Set the message formatter.
        """
        super(TableStorageHandler, self).setFormatter(fmt)
        if self.extra_properties:
            logging._acquireLock()
            try:
                for extra in self.extra_property_formatters.values():
                    extra.converter = fmt.converter
                    extra.datefmt = fmt.datefmt
                    if _PY3:
                        extra.default_time_format = fmt.default_time_format
                        extra.default_msec_format = fmt.default_msec_format
            finally:
                logging._releaseLock()

    def setPartitionKeyFormatter(self, fmt):
        """
        Set the partition key formatter.
        """
        self.partition_key_formatter = fmt

    def setRowKeyFormatter(self, fmt):
        """
        Set the row key formatter.
        """
        self.row_key_formatter = fmt
Example #19
def save_image(img: Image, crs, lat, long, height, stats, ptime):
    table_service = TableService(account_name=_LOGS_ACCOUNT_NAME,
                                 account_key=_LOGS_ACCOUNT_KEY)
    task = Entity()
    task.PartitionKey = img.instrument.NAME
    task.RowKey = img.observation_set
    task.processing_time = ptime
    task.cr_count = len(crs)
    task.latitude = str(lat)
    task.longitude = str(long)
    task.height = str(height)
    task.image_type = img.file_type
    task.observation_date = str(img.observation_date)
    task.observation_start_time = str(img.observation_start_time)
    task.equinox = str(img.equinox)
    task.exposition_duration = str(img.exposition_duration)
    task.gain = str(img.gain)
    task.proposal_id = str(img.proposal_id)
    task.position_angle = str(img.position_angle)
    task.right_ascension = str(img.right_ascension)
    task.right_ascension_target = str(img.right_ascension_target)
    task.declination = str(img.declination)
    task.declination_target = str(img.declination_target)

    if img.aperture is not None:
        task.aperture = str(img.aperture)
        task.ecliptic_lon = str(img.ecliptic_lon)
        task.ecliptic_lat = str(img.ecliptic_lat)
        task.galactic_lon = str(img.galactic_lon)
        task.galactic_lat = str(img.galactic_lat)

    task.moon_angle = str(img.moon_angle)
    task.sun_angle = str(img.sun_angle)
    task.sun_altitude = str(img.sun_altitude)
    task.wcs_axes = img.wcs_axes

    for idx in range(1, img.wcs_axes + 1):
        task['wcs_crpix_%d' % idx] = str(img.wcs_crpix(idx))
        task['wcs_crval_%d' % idx] = str(img.wcs_crval(idx))
        task['wcs_ctype_%d' % idx] = str(img.wcs_ctype(idx))
        for part in [1, 2]:
            task['wcs_cd_%d_%d' % (idx, part)] = str(img.wcs_cd(idx, part))
        task['wcs_ltv_%d' % idx] = str(img.wcs_ltv(idx))
        task['wcs_ltm_%d' % idx] = str(img.wcs_ltm(idx))

    task.wcs_pa_aper = str(img.wcs_pa_aper)
    task.wcs_va_factor = str(img.wcs_va_factor)
    task.wcs_orientation = str(img.wcs_orientation)
    task.wcs_ra_aperture = str(img.wcs_ra_aperture)
    task.wcs_dec_aperture = str(img.wcs_dec_aperture)

    # Add stats
    for key, value in stats.items():
        task['stat_%s' % key] = str(value)

    table_service.insert_or_replace_entity('imagestable', task)
    # for chunk in chunks(crs, 100):
    #    batch = TableBatch()
    if __SAVE_CR_SEPARATELY_:
        logging.info('Done inserting image at {} '.format(datetime.datetime.now().replace(microsecond=0)))
        logging.info('Started cr individual inserts')
        for cr in crs:
            cr_task = {'PartitionKey': img.observation_set, 'RowKey': cr.label}

            for prop in cr:
                cr_task[prop] = str(cr[prop])

            table_service.insert_or_replace_entity(cr_task)
import vk, urllib.request, csv, json, pprint, time
from pprint import pprint
from azure.storage.table import TableService, Entity, TableBatch
table_service = TableService(account_name='seva', account_key='SgbxLwWkBH4XuGebxECoXfNVG3mVM5YjOs+SWTDUSacc+3YgUmcafYXrXdz5k0HtlZQ3AuEJ1IcFtZYeGVR9Hw==')
batch = TableBatch()
#table_service.delete_table('MyVkApp')
#table_service.create_table('MyVkApp')
login = input("Enter username: ")
password = input("Enter password: ")
session = vk.AuthSession(app_id='5889724', user_login=login, user_password=password)
api = vk.API(session)
posts = api.wall.get(count='')
#table_service.create_table('MyVkApp')
all_posts_info = {'PartitionKey': 'posts', 'RowKey': '001', 'all_posts': posts[0]}
all_posts_info = table_service.insert_or_replace_entity('MyVkApp', all_posts_info)
all_posts_info = table_service.get_entity('MyVkApp', 'posts', '001')
print(all_posts_info.all_posts)
class az(object):
    
    def __init__(self, default_table_name=DEFAULT_TABLE, partitionKey='default'):
        self.TABLE_STORAGE_KEY = os.getenv('AZURE_STORAGE_KEY')
        self.STORAGE_NAME = os.getenv('STORAGE_NAME')
        self.default_table_name = default_table_name
        self.default_partition = partitionKey 
        if self.TABLE_STORAGE_KEY == None: 
            from tokens import TABLE_STORAGE_ACCESS_KEY, STORAGE_ACCOUNT_NAME
            self.TABLE_STORAGE_KEY = TABLE_STORAGE_ACCESS_KEY
            self.STORAGE_NAME = STORAGE_ACCOUNT_NAME
        self.table_service = TableService(account_name=self.STORAGE_NAME, account_key=self.TABLE_STORAGE_KEY)
        #create_table_if_does_not_exists(self.default_table_name)
        
    def insert_or_replace_entity_to_azure(self, rowKey, entry, t_name=DEFAULT_TABLE):
        '''
        Builds an Entity from the given entry dict and row key and
        uploads it to Azure Table Storage via insert-or-replace.
        '''
        segment = Entity()
        segment.PartitionKey = self.default_partition
        segment.RowKey = str(rowKey).zfill(8)
        segment.latA = str(entry['latA'])
        segment.longA = str(entry['longA'])
        segment.latB = str(entry['latB'])
        segment.longB = str(entry['longB'])
        segment.colorKey = str(entry['color'])
            
        #print segment.colorKey 
        
        if os.name == 'nt':
            self.table_service.insert_or_replace_entity(t_name, self.default_partition, str(rowKey).zfill(8), segment)
        else:
            self.table_service.insert_or_replace_entity(t_name, segment) 
            
    def create_table(self, name):
        return self.table_service.create_table(name) 
        
    def delete_table(self, name):
        return self.table_service.delete_table(name)
        
    def delete_entity_by_rowKey(self, rowKey, table_name=DEFAULT_TABLE):
        return self.table_service.delete_entity(table_name, self.default_partition, rowKey)
        
        
    def does_table_exist(self, table_name):
        if os.name == 'nt':
            for i in self.table_service.query_tables():
                if i.name == table_name:
                    return True
        else:
            for i in self.table_service.list_tables():
                if i.name == table_name:
                    return True 
        return False 
        
    def list_tables(self):
        if os.name == 'nt':
            for j in self.table_service.query_tables():
                print j.name 
        else:
            for j in self.table_service.list_tables():
                print j.name 
                      
    def create_table_if_does_not_exist(self, table_name=DEFAULT_TABLE):
        if self.does_table_exist(table_name):
            return 'already exists'
        else:
            self.table_service.create_table(table_name)
            
            
    def create_entry(self, latA, lonA, latB, lonB, bumpiness):
        x = {
            'latA':latA,
            'longA':lonA,
            'latB':latB,
            'longB':lonB,
            'color': bumpiness
        }
        return x
        
    def create_random_entry(self):
        x = {
            'latA':random.uniform(37,38),
            'longA':random.uniform(-122,-123),
            'latB':random.uniform(37,38),
            'longB':random.uniform(-122,-123),
            'color': random.randint(0,7)
        }
        return x 
        
    def create_and_insert_or_replace_entity_azure(self, latA, lonA, latB, lonB, bumpiness, rowKey, table_name=DEFAULT_TABLE ):
        return self.insert_or_replace_entity_to_azure(rowKey, self.create_entry(latA, lonA, latB, lonB, bumpiness), table_name)