def test_add_block_to_append_blob_with_container_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token = self.bs.generate_container_shared_access_signature(
            self.container_name,
            permission=ContainerPermissions.READ +
            ContainerPermissions.CREATE + ContainerPermissions.ADD,
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        bs_with_sas = AppendBlobService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
            protocol=self.settings.PROTOCOL)

        # Act
        test_blob_name = "testblobname"
        text_blob_content = "test-blob-content"
        bs_with_sas.create_blob(self.container_name, blob_name=test_blob_name)
        bs_with_sas.append_blob_from_text(self.container_name,
                                          blob_name=test_blob_name,
                                          text=text_blob_content)

        blob_content = bs_with_sas.get_blob_to_text(self.container_name,
                                                    blob_name=test_blob_name)

        # Assert
        self.assertEqual(blob_content.content, text_blob_content)
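A closely related pattern scopes the SAS to a single blob rather than the whole container. A minimal sketch in the same test context (the BlobPermissions import and the blob name are assumptions, not part of the original test):

from azure.storage.blob import BlobPermissions

blob_token = self.bs.generate_blob_shared_access_signature(
    self.container_name,
    'testblobname',  # hypothetical blob name
    permission=BlobPermissions.READ + BlobPermissions.ADD,
    expiry=datetime.utcnow() + timedelta(hours=1),
)
bs_blob_sas = AppendBlobService(
    account_name=self.settings.STORAGE_ACCOUNT_NAME,
    sas_token=blob_token,
    protocol=self.settings.PROTOCOL)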
Example #2
    def update_status(self, status=0, errorMsg=None):
        if self.files is not None:
            text = str(status) + '&,&' + datetime.datetime.utcnow().strftime(
                '%a, %d %b %Y %H:%M:%S GMT') + '&,&' + '&,&'.join(self.files)
        else:
            text = str(status) + '&,&' + datetime.datetime.utcnow().strftime(
                '%a, %d %b %Y %H:%M:%S GMT')
        if errorMsg is not None:
            text = str(status) + '&,&' + datetime.datetime.utcnow().strftime(
                '%a, %d %b %Y %H:%M:%S GMT') + '&,&' + errorMsg
        name = self.folderName + '/status.cvs'
        append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        append_blob_service.create_blob(
            container_name='data',
            blob_name=name,
            content_settings=ContentSettings(content_type='text/plain'))
        append_blob_service.append_blob_from_text(container_name='data',
                                                  blob_name=name,
                                                  text=text)


#if __name__ == '__main__':
#DataUpload(dirPath='../../demo_saved_data/2018-07-25-13-01-51').begin_update_files()
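The status record above is a single '&,&'-delimited line: status, then a GMT timestamp, then either the file list or an error message. A minimal reader sketch (a hypothetical helper, not part of the original class; note the '.cvs' extension matches what update_status writes):

def read_status(service, folder_name):
    blob = service.get_blob_to_text('data', folder_name + '/status.cvs')
    fields = blob.content.split('&,&')
    status, timestamp = fields[0], fields[1]
    extra = fields[2:]  # file list, or a single error message
    return status, timestamp, extra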
Example #3
def WriteLog(txt):
    """ Write text to the log file. A newline sequence is appended at the end. """
    szRet = ""
    if DEBUG_MODE:
        print(r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()) + r" : " + txt + "\r\n")
        return "Skipped because debug mode is enabled."

    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(
            account_name,
            account_key
        )
        szRet = "append_blob_from_text"
        blob_service.append_blob_from_text(
            log_container_name,
            log_file_name,
            r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()) + r" : " + txt + "\r\n"
        )
        szRet = "OK"
    except Exception:
        # szRet = "Log exception"
        pass

    return szRet
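WriteLog relies on module-level names that this excerpt does not define. A minimal sketch of the assumed configuration (every value below is a hypothetical placeholder):

import datetime
from azure.storage.blob import AppendBlobService

DEBUG_MODE = False
account_name = 'myaccount'            # hypothetical
account_key = 'mykey'                 # hypothetical
log_container_name = 'logs'           # must already exist
log_file_name = 'app.log'             # create_blob() this once before the first append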
Example #4
def append_log(today):
    """
    ログ情報をblobにあげる。事前にblobアカウント・blob containerを作っておくこと
    :param today:
    :return:
    """
    print('append log')
    append_blob_service = AppendBlobService(account_name=account_name,
                                            account_key=account_key)
    append_blob_service.append_blob_from_text(container_name, blob_name,
                                              'test' + '\n')
Example #5
def azure_blob_storage():
    try:
        # Create a blob service (connection to Azure Blob Storage)
        block_blob_service = BlockBlobService(account_name='account_name', account_key='account_key')

        # Create a container
        container_name = 'container_name'
        block_blob_service.create_container(container_name)

        # Set the container access level (e.g. public, read-only, private)
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a file to upload to Azure Blob Storage
        local_file_name = "Test.txt"
        full_path_to_file = os.path.join('local_path', local_file_name)

        # Write to the file
        local_file = open(full_path_to_file, 'w')
        local_file.write("hi people")
        local_file.close()

        # Upload the created file, using local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List all the blobs in the container
        container_list = block_blob_service.list_blobs(container_name)
        for file in container_list:
            print("Blob name: " + file.name)

        # Download the blob(s)
        download_file_path = os.path.join('local_path', 'local_file_name')
        block_blob_service.get_blob_to_path(container_name, local_file_name, download_file_path)

        # Delete a blob
        block_blob_service.delete_blob('container_name', 'blob_name')

        # Create an append blob service
        append_blob_service = AppendBlobService(account_name='myaccount', account_key='mykey')

        # The same containers can hold all types of blobs
        append_blob_service.create_container('container_name')

        # An append blob must exist before it can be appended to
        append_blob_service.create_blob('container_name', 'append_blob')

        # Append text to the blob
        append_blob_service.append_blob_from_text('container_name', 'append_blob', 'Hello, world!')

        append_blob = append_blob_service.get_blob_to_text('container_name', 'append_blob')

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
    except Exception as e:
        print(e)
Example #6
def WriteLog(txt):
    """ Write text to the log file. A newline sequence is appended at the end. """
    szRet = ""
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "append_blob_from_text"
        blob_service.append_blob_from_text(
            log_container_name, log_file_name,
            r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()) + r" : " +
            txt + "\r\n")
        szRet = "OK"
    except Exception:
        # szRet = "Log exception"
        pass
    return szRet
Example #7
class AppendBlob:
    '''Append blob helper: each call to append adds the given content to the end of the blob.'''
    def __init__(self):
        #account = account_name or secretconf["azure"]["storage"][0]["account"]
        #key = account_key or secretconf["azure"]["storage"][0]["key"]
        connstr = os.getenv(
            "AZURE_STORAGE_CONNECTION_STRING",
            False) or secretconf["azure"]["storage"][0]["connection_string"]
        self.abservice = AppendBlobService(connection_string=connstr)

    def create(self, container, blob, metadata=None):
        '''Create an empty blob
        
        Args:
            container: name of the container
            blob: name of the blob, use '/' to create a folder
            metadata: meta data (dict object, value must be str) of the text

        Returns:
            url of blob
        '''
        self.abservice.create_blob(container,
                                   blob,
                                   metadata=metadata,
                                   content_settings=textcontent,
                                   if_none_match="*")

        now = datetime.now()
        start = now + timedelta(-1, 0, 0)
        expiry = now + timedelta(365, 0, 0)
        sastoken = self.abservice.generate_blob_shared_access_signature(
            container,
            blob,
            permission=blobpermission,
            expiry=expiry,
            start=start)

        return self.abservice.make_blob_url(container,
                                            blob,
                                            sas_token=sastoken)

    def appendText(self, container, blob, text, metadata=None):
        '''Append text to the blob (note: the metadata argument is currently unused)'''
        self.abservice.append_blob_from_text(container, blob, text)
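A usage sketch for the class above, assuming AZURE_STORAGE_CONNECTION_STRING is set (or secretconf is loadable), the container already exists, and the module-level textcontent and blobpermission referenced in create are defined elsewhere in the file:

ab = AppendBlob()
url = ab.create('logs', 'app/run.log')             # hypothetical container/blob names
ab.appendText('logs', 'app/run.log', 'started\n')
print(url)  # SAS URL valid from yesterday for one year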
Example #8
class LogIMU380Data:
    def __init__(self, imu, user):
        '''Initialize and create a CSV file
        '''
        self.name = 'data-' + datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '.csv'
        self.file = open('data/' + self.name, 'w')
        self.first_row = 0
        # decode converts out of byte array
        self.sn = imu.device_id.split(" ")[0]
        self.pn = imu.device_id.split(" ")[1]
        self.device_id = imu.device_id
        self.odr_setting = imu.odr_setting
        self.packet_type = imu.packet_type
        odr_rates = {0: 'Quiet', 1: '100Hz', 2: '50Hz', 4: '25Hz'}
        self.sample_rate = odr_rates[self.odr_setting]

    def log(self, data, odr_setting):
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row:
            self.first_row = 1
            labels = ''.join('{0:s},'.format(key) for key in data)
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row += 1
            header = ''

        str = ''
        for key in data:
            if key == 'BITstatus' or key == 'GPSITOW' or key == 'counter' or key == 'timeITOW':
                str += '{0:d},'.format(data[key])
            else:
                str += '{0:3.5f},'.format(data[key])
        str = str[:-1]
        str = str + '\n'
        self.file.write(header + str)

    def write_to_azure(self):
        # check for internet
        # if not self.internet_on():
        #    return False

        # record file to cloud
        self.append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        self.append_blob_service.create_blob(
            container_name='data',
            blob_name=self.name,
            content_settings=ContentSettings(content_type='text/plain'))
        f = open("data/" + self.name, "r")
        self.append_blob_service.append_blob_from_text('data', self.name,
                                                       f.read())

        # TODO: check if success

        # record record to ansplatform
        self.record_to_ansplatform()

    def record_to_ansplatform(self):
        data = {
            "pn": self.pn,
            "sn": self.sn,
            "fileName": self.user['fileName'],
            "url": self.name,
            "imuProperties": json.dumps(self.imu_properties),
            "sampleRate": self.sample_rate,
            "packetType": self.packet_type,
            "userId": self.user['id']
        }
        url = "https://ans-platform.azurewebsites.net/api/datafiles/replaceOrCreate"
        data_json = json.dumps(data)
        headers = {
            'Content-type': 'application/json',
            'Authorization': self.user['access_token']
        }
        response = requests.post(url, data=data_json, headers=headers)
        response = response.json()
        print(response)

        # clean up
        self.file.close()
        self.name = ''

        return  #ends thread

    def internet_on(self):
        try:
            urllib2.urlopen('https://ans-platform.azurewebsites.net',
                            timeout=1)
            return True
        except urllib2.URLError as err:
            return False

    def close(self):
        time.sleep(0.1)
        threading.Thread(target=self.write_to_azure).start()
Example #9
      FROM Kwinana_Plc_notification t1 join Kwinana_Plc_refference_data
      t2 on t1.Commodity = t2.Commodity order by AutoId DESC"""


def wa_timenow():
    #get current perth time
    dtn = datetime.now() + timedelta(hours=8)
    return dtn


cursor.execute(sql)
row = cursor.fetchone()
outval = ''
if row[0] is None:
    outval = ",".join([str(i) for i in row[1:]])

    #connect to request-log blob and log request
    append_blob_service = AppendBlobService(account_name=myaccount,
                                            account_key=mykey)
    append_blob_service.append_blob_from_text(
        'requestlogs', 'request.txt', "%s,%s " % (wa_timenow(), outval))
    append_blob_service.append_blob_from_text('requestlogs', 'request.txt',
                                              "\n")
    append_blob = append_blob_service.get_blob_to_text('requestlogs',
                                                       'request.txt')

#send response
response = open(os.environ['res'], 'w')

response.write(outval)
response.close()
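Note that nothing in this snippet creates request.txt, and appends fail if the blob does not exist. A guarded variant (a sketch, not part of the original function code) would create it on first use:

if not append_blob_service.exists('requestlogs', 'request.txt'):
    append_blob_service.create_blob('requestlogs', 'request.txt')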
Example #10
# generator = block_blob_service.list_blobs('mycontainer')
# for blob in generator:
#     print(blob.name)


# block_blob_service.get_blob_to_path('mycontainer', 'myblockblob', 'out-sunset.png')


# block_blob_service.delete_blob('mycontainer', 'myblockblob')





from azure.storage.blob import AppendBlobService

append_blob_service = AppendBlobService(account_name='bobur', account_key='6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw==')

# The same containers can hold all types of blobs
append_blob_service.create_container('myappendcontainer')

# Append blobs must be created before they are appended to
append_blob_service.create_blob('myappendcontainer', 'myappendblob')
append_blob_service.append_blob_from_text('myappendcontainer', 'myappendblob', u'Hello, world!')

append_blob = append_blob_service.get_blob_to_text('myappendcontainer', 'myappendblob')

print(append_blob.content)
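One operational limit worth knowing: an append blob holds at most 50,000 committed blocks, and each append_blob_from_text call commits at least one. A sketch for watching that limit (the property name below comes from the legacy SDK's BlobProperties and should be verified against your installed version):

blob = append_blob_service.get_blob_properties('myappendcontainer', 'myappendblob')
print(blob.properties.append_blob_committed_block_count)  # rotate the blob as this approaches 50000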

Example #11
class FileLoger:
    def __init__(self):
        '''Initialize and create a CSV file
        '''
        start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        self.rover_properties = utility.load_configuration(os.path.join('setting', 'rover.json'))
        if not self.rover_properties:
            os._exit(1)
        if not os.path.exists('data/'):
            os.mkdir('data/')
        self.output_packets = self.rover_properties['userMessages']['outputPackets']
        self.log_file_rows = {}
        self.log_file_names = {}
        self.log_files_obj = {}
        self.log_files = {}
        self.user_file_name = '' # the prefix of log file name.
        self.msgs_need_to_log = []
        self.ws = False
        # azure app.
        self.user_id = ''
        self.file_name = ''
        self.sas_token = '' 
        self.db_user_access_token = ''
        self.host_url = self.rover_properties['userConfiguration']['hostURL']

        #
        self.threads = []  # thread of receiver and paser
        self.exit_thread = False  # flag of exit threads
        self.exit_lock = threading.Lock()  # lock of exit_thread
        self.data_dict = {}  # data container
        self.data_lock = threading.Lock()  # lock of data_queue

    def start_user_log(self, file_name='', ws=False):
        '''
        start log.
        return:
                0: OK
                1: exception that has started logging already.
                2: other exception.
        '''
        try:
            if len(self.log_file_rows) > 0:
                return 1 # has started logging already.

            self.ws = ws
            self.exit_thread = False
            self.user_file_name = file_name
            start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
            for packet in self.output_packets:
                if 1 == packet['save2file']:
                    self.msgs_need_to_log.append(packet['name'])
                    self.log_file_rows[packet['name']] = 0
                    if self.user_file_name == '':
                        self.log_file_names[packet['name']] = packet['name'] +'-' + start_time + '.csv'
                    else:
                        self.log_file_names[packet['name']] = self.user_file_name + '_' + packet['name'] +'-' + start_time + '.csv'
                    self.log_files[packet['name']] = self.log_file_names[packet['name']]
                    self.log_files_obj[packet['name']] = open('data/' + self.log_file_names[packet['name']], 'w')    

            if self.ws:
                self.get_sas_token()
                self.data_dict.clear()
                for i, (k, v) in enumerate(self.log_files.items()): # k:pack type  v:log file name
                    self.data_dict[v]=''
                    threading.Thread(target=self.upload_azure, args=(k,v)).start()
            return 0
        except Exception as e:
            print('Exception! File:[{0}], Line:[{1}]. Exception:{2}'.format(__file__, sys._getframe().f_lineno, e))
            return 2

    def stop_user_log(self):
        '''
        stop log.
        return:
                0: OK
                1: exception that driver hasn't started logging files yet.
                2: other exception.
        '''
        rev = 0
        try:
            if len(self.log_file_rows) == 0:
                return 1 # driver hasn't started logging files yet.
            for i, (k, v) in enumerate(self.log_files_obj.items()):
                v.close()
            self.log_file_rows.clear()
            self.log_file_names.clear()
            self.log_files_obj.clear()
            rev = 0
        except Exception as e:
            print(e)
            rev = 2

        if self.ws:
            time.sleep(1)
            self.exit_lock.acquire()
            self.exit_thread = True
            self.exit_lock.release()
            self.ws = False
            
        return rev

    # def upload_callback(self, current, total):
    #     print('({}, {})'.format(current, total))

    def upload_azure(self, packet_type, log_file_name):
        if self.db_user_access_token == '' or self.sas_token == '':
            print("Error: Can not upload log to azure since token is empty! Please check the network.")
            
        print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'), log_file_name, ' start.')

        accountName = 'navview'
        countainerName = 'data-1000'
        fileName = log_file_name
        bcreate_blob_ok = False

        error_connection = 'ConnectionError'
        error_authorization = 'AuthenticationFailed'
        ii=0
        while True:
            # get data from data_dict.
            self.data_lock.acquire()
            text = self.data_dict[log_file_name]
            self.data_dict[log_file_name] = ''
            self.data_lock.release()

            # check if user stop logging data.
            self.exit_lock.acquire()
            if self.exit_thread:
                # check for internet and text
                if text == '' or (not self.internet_on()):
                    self.exit_lock.release()
                    break
                else:
                    pass
            self.exit_lock.release()

            #let CPU have a break.
            if text == '' : 
                time.sleep(1)
                continue

            #create blob on azure
            if not bcreate_blob_ok:
                try:
                    self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                sas_token=self.sas_token,
                                                                protocol='http')
                    self.append_blob_service.create_blob(container_name=countainerName, blob_name=fileName,
                                                        content_settings=ContentSettings(content_type='text/plain'))
                    bcreate_blob_ok = True
                    threading.Thread(target=self.save_to_db_task, args=(packet_type, log_file_name)).start()
                except Exception as e:
                    # print('Exception when create_blob:', type(e), e)
                    if error_connection in str(e):
                        pass
                    elif error_authorization in str(e):
                        self.get_sas_token()
                        self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                    sas_token=self.sas_token,
                                                                    protocol='http')
                    print('Retry to create_blob again...')
                    continue

            # append blob on azure
            try:
                # self.append_blob_service.append_blob_from_text(countainerName, fileName, text, progress_callback=self.upload_callback)
                self.append_blob_service.append_blob_from_text(countainerName, fileName, text)
            except Exception as e:
                # print('Exception when append_blob:', type(e), e)
                if error_connection in str(e):
                    pass
                elif error_authorization in str(e):
                    self.get_sas_token()
                    self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                sas_token=self.sas_token,
                                                                protocol='http')
                    # if append blob failed, do not drop 'text', but push 'text' to data_dict and re-append next time.
                    self.data_lock.acquire()
                    self.data_dict[log_file_name] = text + self.data_dict[log_file_name]
                    self.data_lock.release()

        if bcreate_blob_ok:
            # if not self.save_to_ans_platform(packet_type, log_file_name):
            #     print('save_to_ans_platform failed.')
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:') , log_file_name, ' done.')

    def save_to_db_task(self, packet_type, file_name):
        if not self.save_to_ans_platform(packet_type, file_name):
            print('save_to_ans_platform failed.')

    def update(self, packet, packet_type, is_var_len_frame):
        if len(self.log_file_rows) == 0: #if hasn't started logging.
            return

        if packet_type in self.msgs_need_to_log:
            if is_var_len_frame:
                self.log_var_len(packet, packet_type)
            else:
                self.log(packet, packet_type)

    def get_log_file_names(self):
        return self.log_file_names.copy()

    def log(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next((x for x in self.output_packets if x['name'] == packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:
            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            # for key in data:
            for i, (k, v) in enumerate(data.items()):
                '''dataStr = output_packet['payload'][i]['name'] + \
                          ' [' + \
                          output_packet['payload'][i]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][i]['name']
                unitStr = output_packet['payload'][i]['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s} ({1:s}),'.format(dataStr, unitStr)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        for i, (k, v) in enumerate(data.items()):
            outputPcktType = output_packet['payload'][i]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(v)
            elif outputPcktType == 'double':
                # double
                str += '{0:0.8f},'.format(v)# 15.12
            elif outputPcktType == 'float':
                str += '{0:0.4f},'.format(v) # 12.8
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(v)
            elif outputPcktType == 'uchar' or outputPcktType == 'char' or outputPcktType == 'string':
                # character
                str += '{:},'.format(v)
            else:
                # unknown
                str += '{0:3.5f},'.format(v)
        # 
        str = header + str[:-1] + '\n'

        self.log_files_obj[packet_type].write(str)
        self.log_files_obj[packet_type].flush()

        if self.ws:
            self.data_lock.acquire()
            self.data_dict[self.log_files[packet_type]] = self.data_dict[self.log_files[packet_type]] + str
            self.data_lock.release()

    def log_var_len(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next((x for x in self.output_packets if x['name'] == packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            for value in output_packet['payload']:
                dataStr = value['name']
                unitStr = value['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s} ({1:s}),'.format(dataStr, unitStr)
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        const_str = ''
        var_str = ''
        var_fileld_tpyes = []
        var_fileld_num = len(output_packet['payload']) - output_packet['var_num']['field_idx']
        const_fileld_num = len(output_packet['payload']) - var_fileld_num

        for idx, value in enumerate(output_packet['payload']):
            if idx >= const_fileld_num:
                var_fileld_tpyes.append(value['type'])

        for idx, key in enumerate(data):
            if idx == 0: # handle const fields, which are all in the first item of data
                for i, (k, v) in enumerate(key.items()):
                    outputPcktType = output_packet['payload'][i]['type']

                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        const_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        const_str += '{0:0.12f},'.format(v) # 15.12
                    elif outputPcktType == 'float':
                        const_str += '{0:0.4f},'.format(v) # 12.8
                    elif outputPcktType == 'uint8':
                        # byte
                        const_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char' or outputPcktType == 'string':
                        # character
                        const_str += '{:},'.format(v)
                    else:
                        # unknown
                        const_str += '{0:3.5f},'.format(v)
            else:
                for i, (k, v) in enumerate(key.items()):
                    outputPcktType = var_fileld_tpyes[i]
                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        var_str += '{0:15.12f},'.format(v)# 15.12
                    elif outputPcktType == 'float':
                        var_str += '{0:12.4f},'.format(v) # 12.8
                    elif outputPcktType == 'uint8':
                        # byte
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char':
                        # character
                        var_str += '{:},'.format(v)
                    else:
                        # unknown
                        var_str += '{0:3.5f},'.format(v)

                str = const_str + var_str
                str = header + str[:-1] + '\n'

                self.log_files_obj[packet_type].write(str)
                self.log_files_obj[packet_type].flush()

                if self.ws:
                    self.data_lock.acquire()
                    self.data_dict[self.log_files[packet_type]] = self.data_dict[self.log_files[packet_type]] + str
                    self.data_lock.release()

                header = ''
                str = ''
                var_str = ''

    def set_user_id(self, user_id):
        self.user_id = user_id
        if not isinstance(self.user_id, str):
            self.user_id = str(self.user_id)

    def set_user_access_token(self, access_token):
        self.db_user_access_token = access_token

    def upload(self, log_file_names):
        t = threading.Thread(target=self.upload_to_azure_task, args=(log_file_names, ))
        t.start()

    def upload_to_azure_task(self, log_files_dict):
        self.get_sas_token()
        if self.db_user_access_token != '' and self.sas_token != '':
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:') , 'Start.')
            for i, (k, v) in enumerate(log_files_dict.items()): # k: packet type; v: log file name
                print('upload:', v)
                self.upload_to_azure(k, v)
            # self.db_user_access_token = ''
            # self.sas_token = ''    
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:') , 'Done.')

    def get_sas_token(self):
        try:
            url = self.host_url + "token/storagesas"
            headers = {'Content-type': 'application/json', 'Authorization': self.db_user_access_token}
            response = requests.post(url, headers=headers)
            rev = response.json()
            if 'token' in rev:
                self.sas_token = rev['token']
            else:
                self.sas_token = ''
                print('Error: Get sas token failed!')
        except Exception as e:
            print('Exception when get_sas_token:', e)

    def upload_to_azure(self, packet_type, file_name):
        ''' Upload CSV's to Azure container.
        '''
        f = open("data/" + file_name, "r")
        text = f.read() #.decode("utf-8")

        try:
            self.azure_storage('navview', self.sas_token, 'data', file_name, text)
        except Exception as e:
            print('azure_storage exception:', e)
            return
            # Try again!
            # self.azure_storage('navview', self.sas_token, 'data', file_name, text)

        ''' Trigger Database upload
        '''
        rev = self.save_to_ans_platform(packet_type, file_name)
        if not rev:
            print('save_to_ans_platform failed.')

    def azure_storage(self, accountName, sasToken, countainerName,fileName,text):
        if 0:
            self.append_blob_service = AppendBlobService(account_name=accountName,
                                                        sas_token=sasToken,
                                                        protocol='http')
            self.append_blob_service.create_blob(container_name=countainerName, blob_name=fileName,
                                                content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text(countainerName, fileName, text)
        else:
            self.block_blob_service = BlockBlobService(account_name=accountName,
                                                        sas_token=sasToken,
                                                        protocol='http')
            self.block_blob_service.create_blob_from_text(  container_name= countainerName,
                                                        blob_name= fileName,
                                                        text=text,
                                                        content_settings=ContentSettings(content_type='text/plain'))

    def save_to_ans_platform(self, packet_type, file_name):
        ''' Upload CSV related information to the database.
        '''
        try:
            data = {"type": 'INS', "model": 'INS1000', "fileName": file_name, "url": file_name, "userId": self.user_id, 
                    "logInfo": { "pn": '11', "sn": '', "packetType":packet_type,"insProperties":json.dumps(self.rover_properties)}}

            url = self.host_url + "api/recordLogs/post"
            data_json = json.dumps(data)
            headers = {'Content-type': 'application/json', 'Authorization': self.db_user_access_token}
            response = requests.post(url, data=data_json, headers=headers)
            return True if 'success' in response.json() else False
        except Exception as e:
            print('Exception when update db:', e)

    def internet_on(self):
        try:
            url = 'https://navview.blob.core.windows.net/'
            if sys.version_info[0] > 2:
                import urllib.request
                response = urllib.request.urlopen(url, timeout=1)
            else:
                import urllib2
                response = urllib2.urlopen(url, timeout=1)
            # print(response.read())
            return True
        except Exception:
            return False
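A usage sketch for FileLoger under stated assumptions: a valid setting/rover.json, a data/ directory, an access token for the host URL, and packets arriving from a receiver thread:

logger = FileLoger()
logger.set_user_id(42)                            # hypothetical user id
logger.set_user_access_token('token...')          # hypothetical token
logger.start_user_log(file_name='run1', ws=True)  # ws=True streams each CSV to Azure
# feed packets as they arrive, e.g.:
# logger.update(packet, 'nav', is_var_len_frame=False)
logger.stop_user_log()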
Example #12
def main(msg: func.QueueMessage) -> None:
    # consume videoname from event that is put on blob storage using filename

    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    raw = msg.get_body().decode('utf-8')
    logging.info(raw)
    eventVideo = json.loads(raw)

    blockBlobService = BlockBlobService(
        account_name=os.environ['remoteStorageAccountName'],
        account_key=os.environ['remoteStorageAccountKey'])
    appendBlobService = AppendBlobService(
        account_name=os.environ['remoteStorageAccountName'],
        account_key=os.environ['remoteStorageAccountKey'])

    cap = getVideo(blockBlobService, eventVideo)
    folder = eventVideo["filename"]

    # process video
    x = 1
    frameRate = cap.get(5)  #frame rate
    numberOfPicturesPerSecond = int(os.environ["numberOfPicturesPerSecond"])
    # start creating frames from video
    while cap.isOpened():
        frameId = cap.get(1)  # current frame number
        ret, frame = cap.read()
        if not ret:
            break

        # when the frame index is a multiple of the sampling step, create an image
        if frameId % math.floor(frameRate / numberOfPicturesPerSecond) == 0:
            logging.info("create cap" + str(x))
            # convert frame to PIL image
            frame_conv = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            pilImage = Image.fromarray(frame_conv)
            imgByteArr = BytesIO()
            pilImage.save(imgByteArr, format='PNG')
            imgByteArr = imgByteArr.getvalue()

            # write image to blob for logging
            imageFileName = folder + "/log/image" + str(int(x)) + "_img.png"
            blockBlobService.create_blob_from_bytes(
                os.environ['remoteStorageOutputContainer'], imageFileName,
                imgByteArr)

            # try to recognize objects and text from image
            description, caption, confidence, text, fullText = analyzeImage(
                imgByteArr)
            # if a train number is recognized, publish it to Power BI
            trainNumber = ""
            if (len(text) == 4 and text.isdigit()) or text[:2] == "NL":
                trainNumber = text
                publishPowerBI(blockBlobService, x, imgByteArr, folder,
                               eventVideo, text, caption)

            # write identification of image to csv
            loggingCsv = "\n" + str(imageFileName) + "|" + str(
                caption) + "|" + str(confidence) + "|" + str(
                    json.dumps(text).replace(
                        '|', ':pipe')) + "|" + str(trainNumber) + "|" + str(
                            json.dumps(fullText).replace('|', ':pipe'))
            appendBlobService.append_blob_from_text("logging",
                                                    os.environ["loggingcsv"],
                                                    loggingCsv)

            # increment image
            x += 1
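The sampling step above is floor(frameRate / numberOfPicturesPerSecond), so for a 30 fps video with two pictures per second every 15th frame is kept. A worked check of that arithmetic (the rate values are hypothetical):

import math

frame_rate = 30.0            # what cap.get(5) might return
pictures_per_second = 2      # hypothetical numberOfPicturesPerSecond
step = math.floor(frame_rate / pictures_per_second)    # 15
kept = [f for f in range(60) if f % step == 0]         # [0, 15, 30, 45]
print(step, kept)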
Example #13
import sys
import csv
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(account_name='storage_account_name',
                                        account_key='storage_account_key')

# Reads the start and stop index passed in through SLURM
start = int(sys.argv[1])
stop = int(sys.argv[2])

#Creates the blob for this batch.
append_blob_service.create_blob('distances',
                                str(start) + "-" + str(stop) + '.csv')

#Logs the start time
append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting " + str(start) + "-" + str(stop) + ":" +
    datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}
# radius of earth in miles
R = 3959.0

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]

#Creates the column names for the distance table
fieldnames = "id"
for code1 in LatLongDict:
Example #14
class RoverLogApp(rover_application_base.RoverApplicationBase):
    def __init__(self, user=False):
        '''Initialize and create a CSV file
        '''

        if user and list(user.keys())[0] == 'startLog':
            self.username = user['startLog']['username']
            self.userId = user['startLog']['id']
            self.userFilename = user['startLog']['fileName']
            self.userAccessToken = user['startLog']['access_token']

        self.start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        if not self.load_configuration():
            os._exit(1)
        if not os.path.exists('data/'):
            os.mkdir('data/')
        self.output_packets = self.rover_properties['userMessages'][
            'outputPackets']
        self.first_row = {}
        self.log_file_names = {}
        self.log_files = {}

        if user:
            with open('tempLogFiles.json', 'r') as outfile:
                data = json.load(outfile)
                self.log_file_names = data

        if not user:
            with open('tempLogFiles.json', 'w') as outfile:
                json.dump({}, outfile)
            try:
                for packet in self.output_packets:
                    self.first_row[packet['name']] = 0
                    self.log_file_names[packet['name']] = packet[
                        'name'] + '-' + self.start_time + '.csv'
                    self.log_files[packet['name']] = open(
                        'data/' + self.log_file_names[packet['name']],
                        'w')  # just log Compact Navigation Message

                    entry = {
                        packet['name']: self.log_file_names[packet['name']]
                    }

                    with open('tempLogFiles.json') as f:
                        data = json.load(f)

                    data.update(entry)
                    with open('tempLogFiles.json', 'w') as f:
                        json.dump(data, f)
            except Exception:
                pass

        if user and list(user.keys())[0] == 'startLog':
            self.savetoAnsPlatform()

        if user and list(user.keys())[0] == 'stopLog':
            time.sleep(10)
            self.close()
            # os.remove("tempLogFiles.json")

    def on_reinit(self):
        print("RoverLogApp.on_reinit()")
        pass
        # Is it necessary to create a new log file when the serial connector is re-plugged?

        # self.start_time = datetime.datetime.now().strftime('%Y%m%d_%H_%M_%S')
        # try:
        #     for packet in self.output_packets:
        #         self.first_row[packet['name']] = 0
        #         self.log_file_names[packet['name']] = packet['name'] +'-' + self.start_time + '.csv'
        #         self.log_files[packet['name']] = open('data/' + self.log_file_names[packet['name']], 'w')# just log Compact Navigation Message
        # except:
        #     pass

    def on_find_active_rover(self):
        print("RoverLogApp.on_find_active_rover()")

    def on_message(self, *args):
        packet_type = args[0]
        self.data = args[1]
        is_var_len_frame = args[2]
        if is_var_len_frame:
            self.log_var_len(self.data, packet_type)
        else:
            self.log(self.data, packet_type)

    def on_exit(self):
        pass

    def load_configuration(self):
        '''
        load properties from 'rover.json'
        returns: True when load successfully.
                 False when load failed.
        '''
        try:
            with open('setting/rover.json') as json_data:
                self.rover_properties = json.load(json_data)
            return True
        # except (ValueError, KeyError, TypeError) as error:
        except Exception as e:
            print(e)
            return False

    def log(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        if not self.rover_properties:
            return

        output_packet = next(
            (x for x in self.rover_properties['userMessages']['outputPackets']
             if x['name'] == packet_type), None)
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row[packet_type]:
            self.first_row[packet_type] = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            keyIdx = -1
            for key in data:
                keyIdx = keyIdx + 1
                '''dataStr = output_packet['payload'][keyIdx]['name'] + \
                          ' [' + \
                          output_packet['payload'][keyIdx]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][keyIdx]['name']
                unitStr = output_packet['payload'][keyIdx]['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s}({1:s}),'.format(dataStr, unitStr)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row[packet_type] += 1
            header = ''

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        keyIdx = -1
        for key in data:
            keyIdx = keyIdx + 1
            outputPcktType = output_packet['payload'][keyIdx]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'double':
                # double
                str += '{0:15.12f},'.format(data[key])
            elif outputPcktType == 'float':
                # print(3) #key + str(2))
                str += '{0:12.8f},'.format(data[key])
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'uchar' or outputPcktType == 'char':
                # character
                str += '{:},'.format(data[key])
            else:
                # unknown
                str += '{0:3.5f},'.format(data[key])
        #
        str = str[:-1]
        str = str + '\n'
        self.log_files[packet_type].write(header + str)

    def log_var_len(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        if not self.rover_properties:
            return

        output_packet = next(
            (x for x in self.rover_properties['userMessages']['outputPackets']
             if x['name'] == packet_type), None)
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row[packet_type]:
            self.first_row[packet_type] = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            for value in output_packet['payload']:
                dataStr = value['name']
                unitStr = value['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s}({1:s}),'.format(dataStr, unitStr)
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row[packet_type] += 1
            header = ''

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        const_str = ''
        var_str = ''
        var_fileld_tpyes = []
        var_fileld_num = len(
            output_packet['payload']) - output_packet['var_num']['field_idx']
        const_fileld_num = len(output_packet['payload']) - var_fileld_num

        for idx, value in enumerate(output_packet['payload']):
            if idx >= const_fileld_num:
                var_fileld_tpyes.append(value['type'])

        for idx, key in enumerate(data):
            if idx < const_fileld_num:
                outputPcktType = output_packet['payload'][idx]['type']

                if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                outputPcktType == 'uint64' or outputPcktType == 'int64':
                    # integers and unsigned integers
                    const_str += '{0:d},'.format(list(key.values())[0])
                elif outputPcktType == 'double':
                    # double
                    const_str += '{0:15.12f},'.format(list(key.values())[0])
                elif outputPcktType == 'float':
                    # print(3) #key + str(2))
                    const_str += '{0:12.8f},'.format(list(key.values())[0])
                elif outputPcktType == 'uint8':
                    # byte
                    const_str += '{0:d},'.format(list(key.values())[0])
                elif outputPcktType == 'uchar' or outputPcktType == 'char':
                    # character
                    const_str += '{:},'.format(list(key.values())[0])
                else:
                    # unknown
                    const_str += '{0:3.5f},'.format(list(key.values())[0])
            else:
                idx_key = -1
                for k, v in key.items():
                    idx_key += 1
                    outputPcktType = var_fileld_tpyes[idx_key]
                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        var_str += '{0:15.12f},'.format(v)
                    elif outputPcktType == 'float':
                        # print(3) #key + str(2))
                        var_str += '{0:12.8f},'.format(v)
                    elif outputPcktType == 'uint8':
                        # byte
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char':
                        # character
                        var_str += '{:},'.format(v)
                    else:
                        # unknown
                        var_str += '{0:3.5f},'.format(v)

                str = const_str + var_str
                str = str[:-1]
                str = str + '\n'
                self.log_files[packet_type].write(header + str)
                header = ''
                str = ''
                var_str = ''

    ''' Upload CSV's to Azure container.
    '''

    def uploadtoAzure(self, fileDisplayName):

        # f = open("data/" + self.user['fileName'], "r")
        f = open("data/" + fileDisplayName, "r")
        text = f.read()
        account_key = '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg=='

        try:
            self.azureStorage('navview', account_key, 'data', fileDisplayName,
                              text)
        except Exception:
            # Try again!
            self.azureStorage('navview', account_key, 'data', fileDisplayName,
                              text)
        ''' Trigger Database upload
        '''
        # self.savetoAnsPlatform()

    def azureStorage(self, accountName, accountkey, countainerName, fileName,
                     text):
        self.append_blob_service = AppendBlobService(account_name=accountName,
                                                     account_key=accountkey,
                                                     protocol='http')
        self.append_blob_service.create_blob(
            container_name=countainerName,
            blob_name=fileName,
            content_settings=ContentSettings(content_type='text/plain'))
        self.append_blob_service.append_blob_from_text(countainerName,
                                                       fileName, text)

    ''' Upload CSV related information to the database.
    '''

    def savetoAnsPlatform(self):
        for files in self.log_file_names:
            fileDisplayName = files + "-" + self.userFilename + ".csv"

            data = {
                "pn": '1.0.0',
                "sn": 'rtk',
                "fileName": fileDisplayName,
                "url": self.log_file_names[files],
                "imuProperties": json.dumps(self.rover_properties),
                "sampleRate": '100',
                "packetType": files,
                "userId": self.userId
            }

            url = "https://api.aceinna.com/api/datafiles/replaceOrCreate"
            data_json = json.dumps(data)
            headers = {
                'Content-type': 'application/json',
                'Authorization': self.userAccessToken
            }
            response = requests.post(url, data=data_json, headers=headers)
            response = response.json()

    # def close(self,fileName,storedFile):
    #     time.sleep(0.1)
    #     # if self.ws:
    #     storedFile.close()
    #     threading.Thread(target=self.uploadtoAzure(fileName)).start()
    #     # else:
    #     #     self.file.close()
    #     # print('close')
    #     # try:
    #     #     for packet in self.output_packets:
    #     #         self.log_files[packet['name']].close()
    #     #         threading.Thread(target=self.write_to_azurelog_files[packet['name']]).start()
    #     # except:
    #     #     pass

    def close(self):
        for files in self.log_file_names:
            self.uploadtoAzure(self.log_file_names[files])
Example #15
class LogIMU380Data:
    def __init__(self):
        '''Initialize and create a blob with CSV extension
        '''
        self.name = 'data-' + datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '.csv'
        self.append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        self.append_blob_service.create_blob(
            container_name='data',
            blob_name=self.name,
            content_settings=ContentSettings(content_type='text/plain'))
        self.first_row = 0
        self.write_str = ''

    def log(self, data, odr_setting):
        '''Buffers and then stores stream based on ODR.  Must buffer due to cloud write time.  
            Uses dictionary keys for column titles
        '''
        odr_rates = {
            0: 0,
            1: 100,
            2: 50,
            5: 25,
            10: 20,
            20: 10,
            25: 5,
            50: 2
        }
        delta_t = 1.0 / odr_rates[odr_setting]

        if not self.first_row:
            self.first_row = 1
            header = ''.join('{0:s},'.format(key) for key in data)
            header = header[:-1]
            header = 'sample,' + header
            header = header + '\r\n'
        else:
            header = ''
            self.first_row += 1

        str = ''
        for key in data:
            if key == 'BITstatus' or key == 'GPSITOW' or key == 'counter' or key == 'timeITOW':
                str += '{0:d},'.format(data[key])
            else:
                str += '{0:3.5f},'.format(data[key])

        str = str[:-1]
        str = '{0:5.2f},'.format(delta_t * (self.first_row - 1)) + str
        str = str + '\r\n'
        self.write_str = self.write_str + header + str

        if (self.first_row % 100 == 0):
            self.write_to_azure()

    def write_to_azure(self):
        '''Appends buffered CSV string to current Azure blob
        '''
        self.append_blob_service.append_blob_from_text('data', self.name,
                                                       self.write_str)
        self.write_str = ''

    def close(self):
        '''Closes blob
        '''
        self.write_to_azure()
        self.name = ''
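A usage sketch for the class above. Note the constructor talks to Azure immediately (it creates the blob), so this only runs with valid credentials; the packet stream is a hypothetical iterable of data dictionaries:

logger = LogIMU380Data()
for data in packet_stream:   # hypothetical source of packet dicts
    logger.log(data, 1)      # odr_setting=1 -> 100 Hz; uploads every 100 rows
logger.close()               # flushes whatever is still buffered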
Example #16
0
import csv
import datetime
import subprocess
import sys
import time

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(account_name='storage_account_name',
                                        account_key='storage_account_key')

# Creates an append blob for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]

# Configures the job size based on the job count passed in.
jobSize = int(len(LatLongDict) / jobCount) + 1
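
# A sketch (not in the original) of how jobCount/jobSize above could partition
# the airports across jobs; printing stands in for whatever launches each job
# (e.g. via the subprocess module imported above).
airports = sorted(LatLongDict.keys())
for job in range(jobCount):
    start = job * jobSize
    stop = min(start + jobSize, len(airports))
    # each job would process airports[start:stop]
    print("job %d handles airports %d-%d" % (job, start, stop))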
Example #17
0
import datetime
import json
import sys

import pandas as pd

stocks = pd.read_csv("auto.csv")
# stocks.Symbol[i]; stocks.Series.size is the size
all_stock_codes = stocks.Symbol
from nsetools import Nse
nse = Nse()
outputlist = []
for script_code in all_stock_codes:
    try:
        print(script_code)
        q = nse.get_quote(script_code)
        outputlist.append(q)
        print(q)
    # each quote is written to the output list above
    except Exception:
        print("Unexpected error:",
              sys.exc_info()[0])

jsonObject = json.dumps(outputlist)
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(
    account_name='aphrostock',
    account_key=
    'cwAlI7P6WfUqijt0jMP+0CCqM54hmoHRofxqdj9PqPamtIfdm9vRUpC+jrpRv/Idma61sSg7NDIvFwkyhMm7KQ=='
)
# the blob is named with the current timestamp, inside the 'stockdata' container
blobName = str(datetime.datetime.now())
append_blob_service.create_blob('stockdata', blobName)
append_blob_service.append_blob_from_text('stockdata', blobName, jsonObject)
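
# A short hedged follow-up (not in the original): read the uploaded JSON back
# from the append blob to verify the write.
blob = append_blob_service.get_blob_to_text('stockdata', blobName)
quotes = json.loads(blob.content)
print('uploaded %d quotes' % len(quotes))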
Example #18
0
#from azure.storage.blob import ContentSettings
#block_blob_service.create_blob_from_path(
#    'myseccontainer',
#    'remotesecfiles3.txt',
#    'localfiles3.txt',
#    content_settings=ContentSettings(content_type='text/html')
#            )

#### To list the blobs in a container, use the list_blobs method. This method returns a generator. 
#### The following code outputs the name of each blob in a container to the console.
#generator = block_blob_service.list_blobs('myseccontainer')
#for blob in generator:
#    print(blob.name)

#### The following example demonstrates using get_blob_to_path to download the contents of the myblob blob and store it to the out-sunset.png file.
#block_blob_service.get_blob_to_path('myseccontainer', 'remotesecf.txt', 'fromazure-out.txt')

#### Finally, to delete a blob, call delete_blob.
#block_blob_service.delete_blob('myseccontainer', 'remotesecf.txt')

#### The example below creates a new append blob and appends some data to it, simulating a simple logging operation.
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(myaccount, mykey)
#The same containers can hold all types of blobs
append_blob_service.create_container('mycontainer')
#Append blobs must be created before they are appended to
append_blob_service.create_blob('mycontainer', 'myappendblob')
append_blob_service.append_blob_from_text('mycontainer', 'myappendblob', u'Test sentence!')
append_blob = append_blob_service.get_blob_to_text('mycontainer', 'myappendblob')
print(append_blob.content)
Example #19
0
import csv
import datetime
import sys
import time

import storageconfig as cfg

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(
    account_name=cfg.storage['storage_account_name'],
    account_key=cfg.storage['storage_account_key'])
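
# A hedged sketch of the storageconfig module assumed above; the keys are the
# ones this script reads and the values are placeholders:
#
#   storage = {
#       'storage_account_name': 'myaccount',
#       'storage_account_key': 'mykey',
#       'container_name': 'distances',
#       'blob_name': 'log.txt',
#   }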

# Creates an append blob for this app.
append_blob_service.create_container(cfg.storage['container_name'])
append_blob_service.create_blob(
    cfg.storage['container_name'], cfg.storage['blob_name'])

append_blob_service.append_blob_from_text(cfg.storage['container_name'],
                                          cfg.storage['blob_name'], "Starting: " +
                                          datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]


# Configures the job size based on the job count passed in.
jobSize = int(len(LatLongDict) / jobCount) + 1
Example #20
0
class FileLoger():
    def __init__(self, device_properties):
        '''Initialize and create a CSV file
        '''
        start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        self.device_properties = device_properties
        if not self.device_properties:
            print('No properties found')
            os._exit(1)

        self.root_folder = os.path.join(resource.get_executor_path(), r'data')
        if not os.path.exists(self.root_folder):
            os.mkdir(self.root_folder)
        self.output_packets = self.device_properties['userMessages'][
            'outputPackets']
        self.log_file_rows = {}
        self.log_file_names = {}
        self.log_files_obj = {}
        self.log_files = {}
        self.user_file_name = ''  # the prefix of log file name.
        self.msgs_need_to_log = []
        self.ws = False
        # azure app.
        self.user_id = ''
        self.file_name = ''
        self.sas_token = ''
        self.db_user_access_token = ''
        # 'http://40.118.233.18:3000/'  # TODO: set a host url
        self.host_url = get_config().ANS_PLATFORM_URL

        #
        self.threads = []  # threads of receiver and parser
        self.exit_thread = False  # flag of exit threads
        self.exit_lock = threading.Lock()  # lock of exit_thread
        self.data_dict = {}  # data container
        self.data_lock = threading.Lock()  # lock of data_queue

        self.device_log_info = None
        self.ans_platform = AnsPlatformAPI()

    def start_user_log(self, file_name='', ws=False):
        '''
        start log.
        return:
                0: OK
                1: exception that has started logging already.
                2: other exception.
        '''
        try:
            if len(self.log_file_rows) > 0:
                return 1  # has started logging already.

            self.ws = ws
            self.exit_thread = False
            self.user_file_name = file_name
            start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
            current_path = os.path.join(self.root_folder, start_time)
            if not os.path.exists(current_path):
                os.mkdir(current_path)

            for packet in self.output_packets:
                # if 1 == packet['save2file']:
                has_save2file = packet.__contains__('save2file')
                save2file = 1
                if has_save2file:
                    save2file = packet['save2file']

                if save2file == 1:
                    self.msgs_need_to_log.append(packet['name'])

                self.log_file_rows[packet['name']] = 0
                if self.user_file_name == '':
                    self.log_file_names[
                        packet['name']] = packet['name'] + '.csv'
                else:
                    self.log_file_names[packet['name']] = self.user_file_name + \
                        '_' + packet['name'] + '.csv'
                self.log_files[packet['name']] = self.log_file_names[
                    packet['name']]

                self.log_files_obj[packet['name']] = open(
                    current_path + '/' + self.log_file_names[packet['name']],
                    'w')

            if self.ws:
                self.get_sas_token()
                self.data_dict.clear()
                for i, (k, v) in enumerate(self.log_files.items()
                                           ):  # k:pack type  v:log file name
                    self.data_dict[v] = ''
                    threading.Thread(target=self.upload_azure,
                                     args=(k, v)).start()
            return 0
        except Exception as e:
            print('Exception! File:[{0}], Line:[{1}]. Exception:{2}'.format(
                __file__,
                sys._getframe().f_lineno, e))
            return 2

    def stop_user_log(self):
        '''
        stop log.
        return:
                0: OK
                1: exception that driver hasn't started logging files yet.
                2: other exception.
        '''
        rev = 0
        try:
            if len(self.log_file_rows) == 0:
                return 1  # driver hasn't started logging files yet.
            for i, (k, v) in enumerate(self.log_files_obj.items()):
                v.close()
            self.log_file_rows.clear()
            self.log_file_names.clear()
            self.log_files_obj.clear()
            rev = 0
        except Exception as e:
            print(e)
            rev = 2

        if self.ws:
            time.sleep(1)
            self.exit_lock.acquire()
            self.exit_thread = True
            self.exit_lock.release()
            self.ws = False

        return rev

    def upload_azure(self, packet_type, log_file_name):
        if self.db_user_access_token == '' or self.sas_token == '':
            print(
                "Error: cannot upload log to Azure because the token is empty! Please check the network."
            )

        print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
              log_file_name, ' start.')

        config = get_config()
        account_name = config.AZURE_STORAGE_ACCOUNT
        container_name = config.AZURE_STORAGE_DATA_CONTAINER
        url_name = datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '-' + self.user_id + '-' + log_file_name
        bcreate_blob_ok = False

        error_connection = 'ConnectionError'
        error_authorization = 'AuthenticationFailed'

        while True:
            # get data from data_dict.
            self.data_lock.acquire()
            text = self.data_dict[log_file_name]
            self.data_dict[log_file_name] = ''
            self.data_lock.release()

            # check if user stop logging data.
            self.exit_lock.acquire()
            if self.exit_thread:
                # check for internet and text
                if text == '' or (not self.internet_on()):
                    self.exit_lock.release()
                    break
                else:
                    pass
            self.exit_lock.release()

            # let CPU have a break.
            if text == '':
                time.sleep(1)
                continue

            # create blob on azure
            if not bcreate_blob_ok:
                try:
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                    self.append_blob_service.create_blob(
                        container_name=container_name,
                        blob_name=url_name,
                        content_settings=ContentSettings(
                            content_type='text/plain'))
                    bcreate_blob_ok = True
                    threading.Thread(target=self.save_to_db_task,
                                     args=(packet_type, log_file_name,
                                           url_name)).start()
                except Exception as e:
                    # print('Exception when create_blob:', type(e), e)
                    if error_connection in str(e):
                        pass
                    elif error_authorization in str(e):
                        self.get_sas_token()
                        self.append_blob_service = AppendBlobService(
                            account_name=account_name,
                            sas_token=self.sas_token,
                            protocol='http')
                    print('Retry to create_blob again...')
                    continue

            # append blob on azure
            try:
                # self.append_blob_service.append_blob_from_text(container_name, url_name, text, progress_callback=self.upload_callback)
                self.append_blob_service.append_blob_from_text(
                    container_name, url_name, text)
            except Exception as e:
                # print('Exception when append_blob:', type(e), e)
                if error_connection in str(e):
                    pass
                elif error_authorization in str(e):
                    self.get_sas_token()
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                    # if append blob failed, do not drop 'text', but push 'text' to data_dict and re-append next time.
                    self.data_lock.acquire()
                    self.data_dict[log_file_name] = text + \
                        self.data_dict[log_file_name]
                    self.data_lock.release()

        if bcreate_blob_ok:
            # if not self.save_to_ans_platform(packet_type, log_file_name):
            #     print('save_to_ans_platform failed.')
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
                  log_file_name, ' done.')

    def save_to_db_task(self, packet_type, file_name, url_name):
        if not self.save_to_ans_platform(packet_type, file_name, url_name):
            print('save_to_ans_platform failed.')

    def append(self, packet_type, packet):
        if len(self.log_file_rows) == 0:  # if hasn't started logging.
            return

        if packet_type in self.msgs_need_to_log:
            self.log(packet_type, packet)

    def get_log_file_names(self):
        return self.log_file_names.copy()

    def log(self, packet_type, data):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next(
            (x for x in self.output_packets if x['name'] == packet_type), None)

        fields = [field['name'] for field in output_packet['payload']]
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:
            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            # for key in data:
            for i, (k, v) in enumerate(data.items()):
                '''dataStr = output_packet['payload'][i]['name'] + \
                          ' [' + \
                          output_packet['payload'][i]['unit'] + \
                          ']'''
                if not fields.__contains__(k):
                    continue
                data_str = output_packet['payload'][i]['name']
                unit_str = output_packet['payload'][i]['unit']
                if unit_str == '':
                    labels = labels + '{0:s},'.format(data_str)
                else:
                    labels = labels + \
                        '{0:s} ({1:s}),'.format(data_str, unit_str)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        for i, (k, v) in enumerate(data.items()):
            if not fields.__contains__(k):
                continue
            output_packet_type = output_packet['payload'][i]['type']

            if output_packet['payload'][i].__contains__('scaling'):
                str += '{0},'.format(v)
            else:
                if output_packet_type == 'uint32' or output_packet_type == 'int32' or \
                        output_packet_type == 'uint16' or output_packet_type == 'int16' or \
                        output_packet_type == 'uint64' or output_packet_type == 'int64':
                    # integers and unsigned integers
                    str += '{0:d},'.format(v)
                elif output_packet_type == 'double':
                    # double
                    str += '{0:0.8f},'.format(v)  # 15.12
                elif output_packet_type == 'float':
                    str += '{0:0.4f},'.format(v)  # 12.8
                elif output_packet_type == 'uint8':
                    # byte
                    str += '{0:d},'.format(v)
                elif output_packet_type == 'uchar' or output_packet_type == 'char' or output_packet_type == 'string':
                    # character
                    str += '{:},'.format(v)
                else:
                    # unknown
                    str += '{0:3.5f},'.format(v)
        #
        str = header + str[:-1] + '\n'

        self.log_files_obj[packet_type].write(str)
        self.log_files_obj[packet_type].flush()

        if self.ws:
            self.data_lock.acquire()
            self.data_dict[self.log_files[packet_type]] = self.data_dict[
                self.log_files[packet_type]] + str
            self.data_lock.release()

    def set_info(self, info):
        self.device_log_info = info
        pass

    def set_user_id(self, user_id):
        self.user_id = user_id
        if not isinstance(self.user_id, str):
            self.user_id = str(self.user_id)

    def set_user_access_token(self, access_token):
        self.db_user_access_token = access_token

    def get_sas_token(self):
        try:
            self.ans_platform.set_access_token(self.db_user_access_token)
            self.sas_token = self.ans_platform.get_sas_token()
        except Exception as e:
            self.sas_token = ''
            print('Exception when get_sas_token:', e)

    def save_to_ans_platform(self, packet_type, file_name, url_name):
        ''' Upload CSV related information to the database.
        '''
        if not self.device_log_info:
            return False

        try:
            self.device_log_info['fileName'] = file_name
            self.device_log_info['url'] = url_name
            self.device_log_info['userId'] = self.user_id
            self.device_log_info['logInfo']['packetType'] = packet_type
            data = self.device_log_info

            # data = {
            #     "type": self.device_log_info['type'],
            #     "model": self.device_log_info['name'],
            #     "fileName": file_name,
            #     "url": file_name,
            #     "userId": self.user_id,
            #     "logInfo": {
            #             "pn": self.device_log_info['pn'],
            #             "sn": self.device_log_info['sn'],
            #             "packetType": packet_type,
            #             "insProperties": json.dumps(self.device_properties)
            #     }
            # }

            self.ans_platform.set_access_token(self.db_user_access_token)
            return self.ans_platform.save_record_log(data)
        except Exception as e:
            print('Exception when update db:', e)

    def internet_on(self):
        try:
            url = 'https://navview.blob.core.windows.net/'
            if sys.version_info[0] > 2:
                import urllib.request
                response = urllib.request.urlopen(url, timeout=1)
            else:
                import urllib2
                response = urllib2.urlopen(url, timeout=1)
            # print(response.read())
            return True
        except Exception:
            # catching urllib2.URLError alone would raise a NameError on
            # Python 3, where urllib2 is never imported
            return False
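
# A hedged usage sketch for FileLoger (not part of the original source): the
# properties file name, the packet name 'z1', and its fields are assumptions;
# the real shape comes from the device's JSON properties (userMessages.outputPackets).
import json

with open('openimu.json') as props_file:
    props = json.load(props_file)
file_loger = FileLoger(props)
if file_loger.start_user_log('run1') == 0:
    file_loger.append('z1', {'timeITOW': 1000, 'xAccel': 0.01, 'yAccel': 0.0})
    file_loger.stop_user_log()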
Example #21
0
class OpenIMULog:
    
    def __init__(self, imu, user = False):
        '''Initialize and create a CSV file
        '''

        self.name = 'data-' + datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S') + '.csv'
        if user:
            self.user = user
            if self.user['fileName'] == '':
                self.user['fileName'] = self.name
            else:
                self.user['fileName'] += '.csv'
            self.file = open('data/' + self.user['fileName'], 'w')
        else:
            self.file = open('data/' + self.name, 'w')
        self.first_row = 0
        # decode converts out of byte array
        self.ws = imu.ws
        self.sn = imu.device_id.split(" ")[0]
        self.pn = imu.device_id.split(" ")[1]
        self.device_id = imu.device_id
        self.odr_setting = imu.odr_setting
        self.packet_type = imu.packet_type
        self.imu_properties = imu.imu_properties

    # Parse the data, read in from the unit, and generate a data file using
    #   the json properties file to create a header and specify the precision
    #   of the data in the resulting data file.
    def log(self, imu, data):
        #
        output_packet = next((x for x in imu.imu_properties['userMessages']['outputPackets'] if x['name'] == imu.packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row:
            self.first_row = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            keyIdx = -1
            for key in data:
                keyIdx= keyIdx + 1
                '''dataStr = output_packet['payload'][keyIdx]['name'] + \
                          ' [' + \
                          output_packet['payload'][keyIdx]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][keyIdx]['name']
                labels = labels + '{0:s},'.format(dataStr)
            
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row += 1
            header = ''


        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        keyIdx = -1
        for key in data:
            keyIdx= keyIdx + 1
            outputPcktType = output_packet['payload'][keyIdx]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'double':
                # double
                str += '{0:15.12f},'.format(data[key])
            elif outputPcktType == 'float':
                # print(3) #key + str(2))
                str += '{0:12.8f},'.format(data[key])
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'uchar' or outputPcktType == 'char':
                # character
                str += '{:},'.format(data[key])
            else:
                # unknown
                print(0)
                str += '{0:3.5f},'.format(data[key])

        # 
        str = str[:-1]
        str = str + '\n'
        self.file.write(header+str)

    def write_to_azure(self):
        # check for internet 
        # if not self.internet_on(): 
        #    return False

        # record file to cloud
        # f = open("data/" + self.name,"r")
        f = open("data/" + self.user['fileName'], "r")
        text = f.read()
        try: 
            self.append_blob_service = AppendBlobService(account_name='navview', account_key='+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==', protocol='http')
            self.append_blob_service.create_blob(container_name='data', blob_name=self.name,  content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text('data',self.name, text)
        except:
            # Try again!
            print('trying to write again due to exception')
            self.append_blob_service = AppendBlobService(account_name='navview', account_key='+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==', protocol='http')
            self.append_blob_service.create_blob(container_name='data', blob_name=self.name,  content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text('data',self.name, text)


        # record record to ansplatform
        self.record_to_ansplatform()
        
        
    def record_to_ansplatform(self):
        data = { "pn" : self.pn, "sn": self.sn, "fileName" : self.user['fileName'],  "url" : self.name, "imuProperties" : json.dumps(self.imu_properties),
                 "sampleRate" : self.odr_setting, "packetType" : self.packet_type, "userId" : self.user['id'] }
        url = "https://api.aceinna.com/api/datafiles/replaceOrCreate"
        data_json = json.dumps(data)
        headers = {'Content-type': 'application/json', 'Authorization' : self.user['access_token'] }
        response = requests.post(url, data=data_json, headers=headers)
        response = response.json()
       
        # clean up
        self.name = ''

        return  #ends thread

    def internet_on(self):
        try:
            urllib2.urlopen('https://ans-platform.azurewebsites.net', timeout=1)
            return True
        except urllib2.URLError as err: 
            return False

    def close(self):
        time.sleep(0.1)
        if self.ws:
            self.file.close()
            threading.Thread(target=self.write_to_azure).start()
        else:
            self.file.close()
Example #22
0
class AzureBlobStore21(implements(StoreInterface)):
    def __init__(self, storage_creds, max_retries=10):
        self.storage_id = storage_creds["name"]
        self.storage_key = storage_creds["key"]

        self.bs = BlockBlobService(account_name=self.storage_id,
                                   account_key=self.storage_key)
        self.append_bs = AppendBlobService(account_name=self.storage_id,
                                           account_key=self.storage_key)

        self.max_retries = max_retries
        self.set_retries(max_retries)

    # ---- HELPER functions ----

    def set_retries(self, count):

        old_count = self.max_retries
        self.max_retries = count

        # bug workaround: standard Retry classes don't retry status=409 (container is being deleted)
        #import azure.storage.common.retry as retry
        #self.bs.retry = retry.LinearRetry(backoff=5, max_attempts=count).retry
        #self.append_bs.retry = retry.LinearRetry(backoff=5, max_attempts=count).retry

        self.bs.retry = utils.make_retry_func(count)
        self.append_bs.retry = utils.make_retry_func(count)

        return old_count

    # ---- MISC part of interface ----

    def get_service_name(self):
        ''' return the unique name of the storage service'''
        return self.storage_id

    def get_retry(self):
        return self.bs.retry

    def set_retry(self, value):
        self.bs.retry = value

    # ---- CONTAINER interface ----

    def does_container_exist(self, container):
        return self.bs.exists(container)

    def create_container(self, container):
        return self.bs.create_container(container)

    def list_containers(self):
        containers = self.bs.list_containers()
        name_list = [contain.name for contain in containers]
        return name_list

    def delete_container(self, container):
        return self.bs.delete_container(container)

    def get_container_properties(self, container):
        props = self.bs.get_container_properties(container)
        return props

    def get_container_metadata(self, container):
        md = self.bs.get_container_metadata(container)
        return md

    # def set_container_metadata(self, container, md_dict):
    #     return self.bs.set_container_metadata(container, md_dict)

    # ---- BLOB interface ----

    def does_blob_exist(self, container, blob_path):
        return self.bs.exists(container, blob_path)

    def create_blob(self, container, blob_path, text, fail_if_exists=False):
        ifn = "*" if fail_if_exists else None

        return self.bs.create_blob_from_text(container,
                                             blob_path,
                                             text,
                                             if_none_match=ifn)

    def create_blob_from_path(self,
                              container,
                              blob_path,
                              source_fn,
                              progress_callback=None):
        result = self.bs.create_blob_from_path(
            container,
            blob_path,
            source_fn,
            progress_callback=progress_callback)
        return result

    def append_blob(self,
                    container,
                    blob_path,
                    text,
                    append_with_rewrite=False):
        # create blob if it doesn't exist

        if not append_with_rewrite:
            # normal handling
            if not self.append_bs.exists(container, blob_path):
                self.append_bs.create_blob(container, blob_path)

            return self.append_bs.append_blob_from_text(
                container, blob_path, text)
        ''' 
        Appends text to a normal blob blob by reading and then rewriting the entire blob.
        Correctly handles concurrency/race conditions.
        Recommended for lots of small items (like 10,000 run names).

        Note: we turn off retries on azure CALL-level so that we can retry on 
        OUR CALL-level.
        '''
        # experimental local retry loop
        old_retry = self.get_retry()
        self.set_retry(utils.make_retry_func(0))
        succeeded = False

        for i in range(20):

            try:
                if self.does_blob_exist(container, blob_path):
                    # read prev contents along with its etag
                    blob = self.bs.get_blob_to_text(container, blob_path)
                    # append our text
                    new_text = blob.content + text
                    # write blob, ensuring etag matches (no one updated since above read)
                    self.bs.create_blob_from_text(container,
                                                  blob_path,
                                                  new_text,
                                                  if_match=blob.properties.etag)
                else:
                    # if no previous blob, just try to create it; fail (and
                    # retry) if a concurrent writer created it first
                    self.create_blob(container, blob_path, text, fail_if_exists=True)
            except BaseException as ex:
                logger.exception(
                    "Error in _append_blob_with_retries, ex={}".format(ex))
                sleep_time = np.random.random() * 4
                console.diag(
                    "XT store received an expected azure exception; will backoff for {:.4f} secs [retry #{}]"
                    .format(sleep_time, i + 1))
                time.sleep(sleep_time)
            else:
                succeeded = True
                break

        # restore retry
        self.bs.set_retry(old_retry)

        if not succeeded:
            errors.service_error(
                "_append_blob_with_rewrite failed (too many retries)")

    def list_blobs(self,
                   container,
                   path=None,
                   return_names=True,
                   recursive=True):
        '''
        NOTE: the semantics here are tricky

        if recursive:
            - return a flat list of all full path names of all files (no directory entries)
        else: 
            - return a flat list of all files and all directory names (add "/" to end of directory names)

        if return_names:
            - return list of names
        else:
            - return a list of objects with following properties:
                .name     (file pathname)
                .properties
                    .content_length   (number)
                    .modified_ns      (time in ns)

        The delimiter trick: this is when we set the delimiter arg = "/" to tell azure to return only the blobs 
        in the specified directory - that is, don't return blobs from child directories.  In this case, azure 
        returns the effective child directory name, followed by a "/", but not its contents (which we hope is faster).
        '''
        delimiter = None if recursive else "/"

        # specific Azure path rules for good results
        if path:
            if path.startswith("/"):
                # blob API wants this part of path relative to container
                path = path[1:]

            # we should only add a "/" if path is a folder path
            if path.endswith("*"):
                # we just need to block the addition of "/"
                path = path[0:-1]
            elif not path.endswith("/"):
                path += "/"  # best if path ends with "/"

        blobs = self.bs.list_blobs(container, prefix=path, delimiter=delimiter)

        if return_names:
            blobs = [blob.name for blob in blobs]
        else:
            blobs = list(blobs)
        return blobs

    def delete_blob(self, container, blob_path, snapshot=None):
        return self.bs.delete_blob(container,
                                   blob_path,
                                   delete_snapshots=DeleteSnapshot.Include)

    def get_blob_text(self, container, blob_path):
        # watch out for 0-length blobs - they trigger an Azure RETRY error
        text = ""
        # azure storage bug workaround: avoid RETRY errors for 0-length blob
        blob = self.bs.get_blob_properties(container, blob_path)
        if blob.properties.content_length:
            blob = self.bs.get_blob_to_text(container, blob_path)
            text = blob.content
        return text

    def get_blob_to_path(self,
                         container,
                         blob_path,
                         dest_fn,
                         snapshot=None,
                         progress_callback=None):
        # azure storage bug workaround: avoid RETRY errors for 0-length blob
        blob = self.bs.get_blob_properties(container, blob_path)
        if blob.properties.content_length:
            result = self.bs.get_blob_to_path(
                container,
                blob_path,
                dest_fn,
                snapshot=snapshot,
                progress_callback=progress_callback)
            text = result.content
        else:
            md = blob.metadata
            if "hdi_isfolder" in md and md["hdi_isfolder"]:
                # its a directory marker; do NOT create a local file for it
                text = ""
            else:
                # 0-length text file; just write the file ourselves
                text = ""
                with open(dest_fn, "wt") as outfile:
                    outfile.write(text)

        return text

    def get_blob_properties(self, container, blob_path):
        props = self.bs.get_blob_properties(container, blob_path)
        return props

    def get_blob_metadata(self, container, blob_path):
        return self.bs.get_blob_metadata(container, blob_path)

    # def set_blob_metadata(self, container, blob_path, md_dict):
    #     return self.bs.set_blob_metadata(container, blob_path, md_dict)

    def copy_blob(self, source_container, source_blob_path, dest_container,
                  dest_blob_path):
        source_blob_url = self.bs.make_blob_url(source_container,
                                                source_blob_path)
        self.bs.copy_blob(dest_container, dest_blob_path, source_blob_url)

    def snapshot_blob(self, container, blob_path):
        blob = self.bs.snapshot_blob(container, blob_path)
        #pd = utils.obj_to_dict(blob)
        return blob
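
# A hedged usage sketch for AzureBlobStore21 (not part of the original source);
# the credentials are placeholders and the container/blob names are made up.
creds = {"name": "myaccount", "key": "mykey"}
store = AzureBlobStore21(creds)
if not store.does_container_exist("logs"):
    store.create_container("logs")
store.append_blob("logs", "run1/status.log", "job started\n")

# the delimiter trick from list_blobs: recursive=False asks Azure for direct
# children only, with child directory names suffixed by "/"
print(store.list_blobs("logs", path="run1", recursive=False))
print(store.get_blob_text("logs", "run1/status.log"))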
Example #23
0
import json
import requests
import pprint
from azure.storage.blob import AppendBlobService
from azure.storage.blob import ContentSettings

append_blob_service = AppendBlobService(
    account_name='navview',
    account_key=
    '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
    protocol='http')
append_blob_service.create_blob(
    container_name='data',
    blob_name="data-2018_05_04_13_13_24.csv",
    content_settings=ContentSettings(content_type='text/plain'))
# use a context manager so the local file handle is closed after the upload
with open("data/data-2018_05_04_13_13_24.csv", "r") as f:
    append_blob_service.append_blob_from_text('data',
                                              "data-2018_05_04_13_13_24.csv",
                                              f.read())
Example #24
0
import csv
import datetime
import sys
import time

import storageconfig as cfg

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(
    account_name=cfg.storage['storage_account_name'],
    account_key=cfg.storage['storage_account_key'])


# Reads the start and stop index passed in through SLURM
start = int(sys.argv[1])
stop = int(sys.argv[2])

# Creates the blob for this batch.
append_blob_service.create_blob(
    'distances', str(start) + "-" + str(stop) + '.csv')


# Logs the start time
append_blob_service.append_blob_from_text(
    cfg.storage['container_name'],
    cfg.storage['blob_name'], "Starting " + str(start) + "-" + str(
        stop) + ":" + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")


LatLongDict = {}
# radius of earth in miles
R = 3959.0

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]


# Creates the column names for the distance table
Example #25
0
class LogWriter(object):
    """description of class"""

    LOG_CONTAINER_NAME = r'log-files'
    DEBUG_MODE = bool(os.getenv('DEBUG_MODE', False))

    # Constructor
    def __init__(self, name, key, subFolderName=None):
        super(LogWriter, self).__init__()

        self._name = name
        self._key = key
        self.m_szLogFileName = ""
        self.m_szSubFolderName = subFolderName
        self.m_pBlobService = AppendBlobService(name, key)

    #}def __init__

    def _CreateLogFile(self):
        """ ログファイルを作成する。WriteLog を呼び出す前に実行すること。 """

        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのためスキップします。")

        try:
            if (0 == len(self.m_szLogFileName)):
                szRet = "create_container"
                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME)
                if bIsExists:
                    pass
                else:
                    self.m_pBlobService.create_container(
                        LogWriter.LOG_CONTAINER_NAME,
                        public_access=PublicAccess.Blob)

                # decide the log file name
                #// appending below, but m_szLogFileName is still an empty
                #// string here because len was checked to be 0 above
                if ((self.m_szSubFolderName is not None)
                        and (0 < len(self.m_szSubFolderName))):
                    #// when a subfolder name is specified, prepend it
                    self.m_szLogFileName += self.m_szSubFolderName + "\\"
                #}if
                self.m_szLogFileName += r"{0:%Y-%m-%dT%H-%M-%S.log}".format(
                    datetime.datetime.now())

                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                if bIsExists:
                    szRet = "already blob."
                else:
                    szRet = "create_blob"
                    self.m_pBlobService.create_blob(
                        LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                szRet = "OK"
            else:
                szRet = "Already called."
                szRet = "OK"
            #}if

        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
            pass
        return szRet

    #}def

    def WriteLog(self, txt):
        """ ログファイルにテキストを出力する。末尾に改行コードが追加される。 """
        szRet = ""
        szLogText = r"{0:%Y-%m-%d %H:%M:%S}".format(
            datetime.datetime.now()) + r" : " + txt + "\r\n"
        if (LogWriter.DEBUG_MODE):
            print(szLogText)
            return ("Debug モードのためスキップしました。")

        try:
            # create the log file
            self._CreateLogFile()

            szRet = "append_blob_from_text"
            self.m_pBlobService.append_blob_from_text(
                LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName, szLogText)
            szRet = "OK"
        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
        #try

        return szRet

    #}def

    def WriteBlob(self, blob_name, value):
        """ 単一 BLOB ファイルを作成しテキストを保存する。 """
        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのため書き込みをしません。")

        try:
            #blob_name = r'sample.txt';

            szRet = "BlockBlobService"
            blob_service = BlockBlobService(self._name, self._key)

            szRet = "create_container"
            blob_service.create_container(LogWriter.LOG_CONTAINER_NAME,
                                          public_access=PublicAccess.Blob)

            szRet = "create_blob_from_bytes"
            #blob_service.create_blob_from_bytes(
            #    log_container_name,
            #    log_blob_name,
            #    b'<center><h1>Hello World!</h1></center>',
            #    content_settings=ContentSettings('text/html')
            #)

            if (isinstance(value, str)):
                szRet = "create_blob_from_text"
                blob_service.create_blob_from_text(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, value)
            else:
                szRet = "create_blob_from_stream"
                blob_service.create_blob_from_stream(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, io.BytesIO(value))
            #}if

            #szRet = "make_blob_url"
            #print(blob_service.make_blob_url(log_container_name, log_blob_name))

            szRet = "OK"
        except:
            print(r"Exception.")
        #try

        return szRet

    #def WriteBlob( blob_name, txt ):

    def MakeBlobUri(self, blob_name):
        blob_service = BlockBlobService(self._name, self._key)
        szRet = blob_service.make_blob_url(LogWriter.LOG_CONTAINER_NAME,
                                           blob_name)

        return (szRet)

    #}def


#}class
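
# A hedged usage sketch for LogWriter (not part of the original source); the
# account name and key are placeholders.
writer = LogWriter('myaccount', 'mykey', subFolderName='jobs')
print(writer.WriteLog('process started'))  # returns 'OK' on success
print(writer.MakeBlobUri('sample.txt'))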
Example #26
0
import csv
import datetime
import subprocess
import sys
import time

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(
	account_name='storage_account_name', 
	account_key='storage_account_key')

# Creates an append blob for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text('distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
	reader = csv.DictReader(csvfile)
	for row in reader:
		LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]


# Configures the job size based on the job count passed in.
jobSize = int(len(LatLongDict) / jobCount) + 1
Example #27
0
#block_blob_service.create_blob_from_path(
#    'myseccontainer',
#    'remotesecfiles3.txt',
#    'localfiles3.txt',
#    content_settings=ContentSettings(content_type='text/html')
#            )

#### To list the blobs in a container, use the list_blobs method. This method returns a generator.
#### The following code outputs the name of each blob in a container to the console.
#generator = block_blob_service.list_blobs('myseccontainer')
#for blob in generator:
#    print(blob.name)

#### The following example demonstrates using get_blob_to_path to download the contents of the myblob blob and store it to the out-sunset.png file.
#block_blob_service.get_blob_to_path('myseccontainer', 'remotesecf.txt', 'fromazure-out.txt')

#### Finally, to delete a blob, call delete_blob.
#block_blob_service.delete_blob('myseccontainer', 'remotesecf.txt')

#### The example below creates a new append blob and appends some data to it, simulating a simple logging operation.
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(myaccount, mykey)
#The same containers can hold all types of blobs
append_blob_service.create_container('mycontainer')
#Append blobs must be created before they are appended to
append_blob_service.create_blob('mycontainer', 'myappendblob')
append_blob_service.append_blob_from_text('mycontainer', 'myappendblob',
                                          u'Test sentence!')
append_blob = append_blob_service.get_blob_to_text('mycontainer',
                                                   'myappendblob')
print(append_blob.content)