def prepare_storage_account(storage_account_name, storage_access_key, endpoint_suffix, protocol="https"):
    blob_service = AppendBlobService(account_name=storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(account_name=storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    table_service.create_table('stemcells')
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]

    blob_service = AppendBlobService(default_storage_account_name, storage_access_key)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(default_storage_account_name, storage_access_key)
    table_service.create_table('stemcells')
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["DEFAULT_STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]
    protocol = "https"
    if settings["ENVIRONMENT"] == "AzureStack":
        protocol = "http"

    blob_service = AppendBlobService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    table_service.create_table('stemcells')
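
# A minimal sketch of the settings dict the prepare_storage helpers above expect.
# The key names come from the functions themselves; the values here are placeholders, not real credentials.
example_settings = {
    "DEFAULT_STORAGE_ACCOUNT_NAME": "mystorageaccount",
    "DEFAULT_STORAGE_ACCESS_KEY": "<storage-access-key>",
    "STORAGE_ACCESS_KEY": "<storage-access-key>",
    "SERVICE_HOST_BASE": "core.windows.net",
    "ENVIRONMENT": "Azure",
}
# prepare_storage(example_settings)  # requires a reachable storage account, so left commented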
def CreateLogFile():
    """ ログファイルを作成する。WriteLog を呼び出す前に実行すること。 """
    szRet = ""
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "create_container"
        bIsExists = blob_service.exists(log_container_name)
        if bIsExists:
            pass
        else:
            blob_service.create_container(log_container_name,
                                          public_access=PublicAccess.Blob)
        bIsExists = blob_service.exists(log_container_name, log_file_name)
        if bIsExists:
            szRet = "already blob."
        else:
            szRet = "create_blob"
            blob_service.create_blob(log_container_name, log_file_name)
        szRet = "OK"
    except:
        #szRet = "Log exception";
        pass
    return szRet
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name,
                                     account_key=storage_access_key,
                                     endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name,
                                 account_key=storage_access_key,
                                 endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')
def prepare_storage_account(storage_account_name,
                            storage_access_key,
                            endpoint_suffix,
                            protocol="https"):
    blob_service = AppendBlobService(account_name=storage_account_name,
                                     account_key=storage_access_key,
                                     endpoint_suffix=endpoint_suffix,
                                     protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(account_name=storage_account_name,
                                 account_key=storage_access_key,
                                 endpoint_suffix=endpoint_suffix,
                                 protocol=protocol)
    table_service.create_table('stemcells')
def stream_logs(client,
                resource_group,
                service,
                app,
                deployment,
                no_format=False,
                raise_error_on_failure=True):
    log_file_sas = None
    error_msg = "Could not get logs for Service: {}".format(service)

    try:
        log_file_sas = client.get_log_file_url(
            resource_group_name=resource_group,
            service_name=service,
            app_name=app,
            deployment_name=deployment).url
    except (AttributeError, CloudError) as e:
        logger.warning("%s Exception: %s", error_msg, e)
        raise CLIError(error_msg)

    if not log_file_sas:
        logger.warning("%s Empty SAS URL.", error_msg)
        raise CLIError(error_msg)

    account_name, endpoint_suffix, container_name, blob_name, sas_token = get_blob_info(
        log_file_sas)

    _stream_logs(no_format,
                 DEFAULT_CHUNK_SIZE,
                 DEFAULT_LOG_TIMEOUT_IN_SEC,
                 AppendBlobService(
                     account_name=account_name,
                     sas_token=sas_token,
                     endpoint_suffix=endpoint_suffix),
                 container_name,
                 blob_name,
                 raise_error_on_failure)
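
# get_blob_info() is not shown in this snippet; it is assumed to split the log-file SAS URL
# into (account_name, endpoint_suffix, container_name, blob_name, sas_token). A rough,
# hypothetical equivalent, for illustration only:
from urllib.parse import urlparse

def get_blob_info_sketch(blob_sas_url):
    # e.g. https://<account>.blob.core.windows.net/<container>/<path/to/blob>?<sas-token>
    parts = urlparse(blob_sas_url)
    account_name, _, endpoint_suffix = parts.netloc.partition(".blob.")
    container_name, _, blob_name = parts.path.lstrip("/").partition("/")
    return account_name, endpoint_suffix, container_name, blob_name, parts.query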
class AppendBlob:
    '''Append blob wrapper: each call to append adds the given content to the end of the blob'''
    def __init__(self):
        #account = account_name or secretconf["azure"]["storage"][0]["account"]
        #key = account_key or secretconf["azure"]["storage"][0]["key"]
        connstr = os.getenv(
            "AZURE_STORAGE_CONNECTION_STRING",
            False) or secretconf["azure"]["storage"][0]["connection_string"]
        self.abservice = AppendBlobService(connection_string=connstr)

    def create(self, container, blob, metadata=None):
        '''Create an empty blob
        
        Args:
            container: name of the container
            blob: name of the blob, use '/' to create a folder
            metadata: metadata for the blob (dict; values must be str)

        Returns:
            url of blob
        '''
        self.abservice.create_blob(container,
                                   blob,
                                   metadata=metadata,
                                   content_settings=textcontent,
                                   if_none_match="*")

        now = datetime.now()
        start = now + timedelta(-1, 0, 0)
        expiry = now + timedelta(365, 0, 0)
        sastoken = self.abservice.generate_blob_shared_access_signature(
            container,
            blob,
            permission=blobpermission,
            expiry=expiry,
            start=start)

        return self.abservice.make_blob_url(container,
                                            blob,
                                            sas_token=sastoken)

    def appendText(self, container, blob, text, metadata=None):
        '''Append text to blob'''
        self.abservice.append_blob_from_text(container, blob, text)
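
# Hypothetical usage of the AppendBlob wrapper above (container and blob names are placeholders):
#   ab = AppendBlob()
#   url = ab.create('logs', 'jobs/2021-01-01.log', metadata={'source': 'worker'})
#   ab.appendText('logs', 'jobs/2021-01-01.log', 'job started\n')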
class LogIMU380Data:
    def __init__(self):
        '''Initialize and create a blob with CSV extension
        '''
        self.name = 'data-' + datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '.csv'
        self.append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        self.append_blob_service.create_blob(
            container_name='data',
            blob_name=self.name,
            content_settings=ContentSettings(content_type='text/plain'))
        self.first_row = 0
        self.write_str = ''

    def log(self, data, odr_setting):
        '''Buffers and then stores stream based on ODR.  Must buffer due to cloud write time.  
            Uses dictionary keys for column titles
        '''
        odr_rates = {
            0: 0,
            1: 100,
            2: 50,
            5: 25,
            10: 20,
            20: 10,
            25: 5,
            50: 2
        }
        delta_t = 1.0 / odr_rates[odr_setting]
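        # e.g. odr_setting == 1 maps to 100 Hz above, so delta_t == 0.01 s between samples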

        if not self.first_row:
            self.first_row = 1
            header = ''.join('{0:s},'.format(key) for key in data)
            header = header[:-1]
            header = 'sample,' + header
            header = header + '\r\n'
        else:
            header = ''
            self.first_row += 1

        str = ''
        for key in data:
            if key == 'BITstatus' or key == 'GPSITOW' or key == 'counter' or key == 'timeITOW':
                str += '{0:d},'.format(data[key])
            else:
                str += '{0:3.5f},'.format(data[key])

        str = str[:-1]
        str = '{0:5.2f},'.format(delta_t * (self.first_row - 1)) + str
        str = str + '\r\n'
        self.write_str = self.write_str + header + str

        if (self.first_row % 100 == 0):
            self.write_to_azure()

    def write_to_azure(self):
        '''Appends buffered CSV string to current Azure blob
        '''
        self.append_blob_service.append_blob_from_text('data', self.name,
                                                       self.write_str)
        self.write_str = ''

    def close(self):
        '''Closes blob
        '''
        self.write_to_azure()
        self.name = ''
# Usage: Call python3 controller.py X, where X is the number of SLURM
# jobs to spawn on the SLURM nodes

import csv
import sys
import subprocess
import datetime
import time
import storageconfig as cfg

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(
    account_name=cfg.storage['storage_account_name'],
    account_key=cfg.storage['storage_account_key'])

# Creates an append blob for this app.
append_blob_service.create_container(cfg.storage['container_name'])
append_blob_service.create_blob(
    cfg.storage['container_name'], cfg.storage['blob_name'])

append_blob_service.append_blob_from_text(cfg.storage['container_name'],
                                          cfg.storage['blob_name'], "Starting: " +
                                          datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])
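
# The remainder of the controller is not shown. A hedged sketch of how it might submit
# one SLURM job per slice of the data using the imports above; the batch-script name
# "worker.sh" and the slice arguments are assumptions, not taken from the original.
def submit_slice_sketch(start_index, stop_index):
    """Submit one SLURM job that processes rows [start_index, stop_index)."""
    subprocess.check_call(["sbatch", "worker.sh", str(start_index), str(stop_index)])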
import csv
import sys
from math import sin, cos, sqrt, atan2, radians
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(account_name='storage_account_name',
                                        account_key='storage_account_key')

# Reads the start and stop index passed in through SLURM
start = int(sys.argv[1])
stop = int(sys.argv[2])

#Creates the blob for this batch.
append_blob_service.create_blob('distances',
                                str(start) + "-" + str(stop) + '.csv')

#Logs the start time
append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting " + str(start) + "-" + str(stop) + ":" +
    datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}
# radius of earth in miles
R = 3959.0

# Reads the airport data into a list for easy access.
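
# The distance computation itself is not shown in this fragment. A sketch of the
# great-circle (haversine) distance in miles that the imports above (sin, cos, sqrt,
# atan2, radians) and R = 3959.0 suggest; treat it as illustrative, not the original code.
def haversine_miles(lat1, lon1, lat2, lon2):
    phi1, phi2 = radians(lat1), radians(lat2)
    dphi = radians(lat2 - lat1)
    dlam = radians(lon2 - lon1)
    a = sin(dphi / 2) ** 2 + cos(phi1) * cos(phi2) * sin(dlam / 2) ** 2
    return R * 2 * atan2(sqrt(a), sqrt(1 - a))
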
class AzureBlobStore21(implements(StoreInterface)):
    def __init__(self, storage_creds, max_retries=10):
        self.storage_id = storage_creds["name"]
        self.storage_key = storage_creds["key"]

        self.bs = BlockBlobService(account_name=self.storage_id,
                                   account_key=self.storage_key)
        self.append_bs = AppendBlobService(account_name=self.storage_id,
                                           account_key=self.storage_key)

        self.max_retries = max_retries
        self.set_retries(max_retries)

    # ---- HELPER functions ----

    def set_retries(self, count):

        old_count = self.max_retries
        self.max_retries = count

        # bug workaround: standard Retry classes don't retry status=409 (container is being deleted)
        #import azure.storage.common.retry as retry
        #self.bs.retry = retry.LinearRetry(backoff=5, max_attempts=count).retry
        #self.append_bs.retry = retry.LinearRetry(backoff=5, max_attempts=count).retry

        self.bs.retry = utils.make_retry_func(count)
        self.append_bs.retry = utils.make_retry_func(count)

        return old_count

    # ---- MISC part of interface ----

    def get_service_name(self):
        ''' return the unique name of the storage service'''
        return self.storage_id

    def get_retry(self):
        return self.bs.retry

    def set_retry(self, value):
        self.bs.retry = value

    # ---- CONTAINER interface ----

    def does_container_exist(self, container):
        return self.bs.exists(container)

    def create_container(self, container):
        return self.bs.create_container(container)

    def list_containers(self):
        containers = self.bs.list_containers()
        name_list = [contain.name for contain in containers]
        return name_list

    def delete_container(self, container):
        return self.bs.delete_container(container)

    def get_container_properties(self, container):
        props = self.bs.get_container_properties(container)
        return props

    def get_container_metadata(self, container):
        md = self.bs.get_container_metadata(container)
        return md

    # def set_container_metadata(self, container, md_dict):
    #     return self.bs.set_container_metadata(container, md_dict)

    # ---- BLOB interface ----

    def does_blob_exist(self, container, blob_path):
        return self.bs.exists(container, blob_path)

    def create_blob(self, container, blob_path, text, fail_if_exists=False):
        ifn = "*" if fail_if_exists else None

        return self.bs.create_blob_from_text(container,
                                             blob_path,
                                             text,
                                             if_none_match=ifn)

    def create_blob_from_path(self,
                              container,
                              blob_path,
                              source_fn,
                              progress_callback=None):
        result = self.bs.create_blob_from_path(
            container,
            blob_path,
            source_fn,
            progress_callback=progress_callback)
        return result

    def append_blob(self,
                    container,
                    blob_path,
                    text,
                    append_with_rewrite=False):
        # create blob if it doesn't exist

        if not append_with_rewrite:
            # normal handling
            if not self.append_bs.exists(container, blob_path):
                self.append_bs.create_blob(container, blob_path)

            return self.append_bs.append_blob_from_text(
                container, blob_path, text)
        ''' 
        Appends text to a normal block blob by reading and then rewriting the entire blob.
        Correctly handles concurrency/race conditions.
        Recommended for lots of small items (like 10,000 run names).

        Note: we turn off retries on azure CALL-level so that we can retry on 
        OUR CALL-level.
        '''
        # experimental local retry loop
        old_retry = self.get_retry()
        self.set_retry(utils.make_retry_func(0))
        succeeded = False

        for i in range(20):

            try:
                if self.does_blob_exist(container, blob_path):
                    # read prev contents (the Blob object carries both the text and the etag)
                    blob = self.bs.get_blob_to_text(container, blob_path)
                    # append our text
                    new_text = blob.content + text
                    # write blob, ensuring etag matches (no one updated since above read)
                    self.bs.create_blob_from_text(container,
                                                  blob_path,
                                                  new_text,
                                                  if_match=blob.properties.etag)
                else:
                    # if no previous blob, just try to create it
                    self.create_blob(container, blob_path, text)
            except BaseException as ex:
                logger.exception(
                    "Error in _append_blob_with_retries, ex={}".format(ex))
                sleep_time = np.random.random() * 4
                console.diag(
                    "XT store received an expected azure exception; will backoff for {:.4f} secs [retry #{}]"
                    .format(sleep_time, i + 1))
                time.sleep(sleep_time)
            else:
                succeeded = True
                break

        # restore retry
        self.set_retry(old_retry)

        if not succeeded:
            errors.service_error(
                "_append_blob_with_rewrite failed (too many retries)")

    def list_blobs(self,
                   container,
                   path=None,
                   return_names=True,
                   recursive=True):
        '''
        NOTE: the semantics here are tricky

        if recursive:
            - return a flat list of all full path names of all files (no directory entries)
        else: 
            - return a flat list of all files and all directory names (add "/" to end of directory names)

        if return_names:
            - return list of names
        else:
            - return a list of objects with following properties:
                .name     (file pathname)
                .properties
                    .content_length   (number)
                    .modified_ns      (time in ns)

        The delimiter trick: this is when we set the delimiter arg = "/" to tell azure to return only the blobs 
        in the specified directory - that is, don't return blobs from child directories.  In this case, azure 
        returns the effective child directory name, followed by a "/", but not its contents (which we hope is faster).
        '''
        delimiter = None if recursive else "/"

        # specific Azure path rules for good results
        if path:
            if path.startswith("/"):
                path = path[
                    1:]  # blob API wants this part of path relative to container

            # we should only add a "/" if path is a folder path
            if path.endswith("*"):
                # we just need to block the addition of "/"
                path = path[0:-1]
            elif not path.endswith("/"):
                path += "/"  # best if path ends with "/"

        blobs = self.bs.list_blobs(container, prefix=path, delimiter=delimiter)

        if return_names:
            blobs = [blob.name for blob in blobs]
        else:
            blobs = list(blobs)
        return blobs

    def delete_blob(self, container, blob_path, snapshot=None):
        dss = DeleteSnapshot()
        return self.bs.delete_blob(container,
                                   blob_path,
                                   delete_snapshots=dss.Include)

    def get_blob_text(self, container, blob_path):
        # watch out for 0-length blobs - they trigger an Azure RETRY error
        text = ""
        # azure storage bug workaround: avoid RETRY errors for 0-length blob
        blob = self.bs.get_blob_properties(container, blob_path)
        if blob.properties.content_length:
            blob = self.bs.get_blob_to_text(container, blob_path)
            text = blob.content
        return text

    def get_blob_to_path(self,
                         container,
                         blob_path,
                         dest_fn,
                         snapshot=None,
                         progress_callback=None):
        # azure storage bug workaround: avoid RETRY errors for 0-length blob
        blob = self.bs.get_blob_properties(container, blob_path)
        if blob.properties.content_length:
            result = self.bs.get_blob_to_path(
                container,
                blob_path,
                dest_fn,
                snapshot=snapshot,
                progress_callback=progress_callback)
            text = result.content
        else:
            md = blob.metadata
            if "hdi_isfolder" in md and md["hdi_isfolder"]:
                # it's a directory marker; do NOT create a local file for it
                text = ""
            else:
                # 0-length text file; just write the file ourselves
                text = ""
                with open(dest_fn, "wt") as outfile:
                    outfile.write(text)

        return text

    def get_blob_properties(self, container, blob_path):
        props = self.bs.get_blob_properties(container, blob_path)
        return props

    def get_blob_metadata(self, container, blob_path):
        return self.bs.get_blob_metadata(container, blob_path)

    # def set_blob_metadata(self, container, blob_path, md_dict):
    #     return self.bs.set_blob_metadata(container, blob_path, md_dict)

    def copy_blob(self, source_container, source_blob_path, dest_container,
                  dest_blob_path):
        source_blob_url = self.bs.make_blob_url(source_container,
                                                source_blob_path)
        self.bs.copy_blob(dest_container, dest_blob_path, source_blob_url)

    def snapshot_blob(self, container, blob_path):
        blob = self.bs.snapshot_blob(container, blob_path)
        #pd = utils.obj_to_dict(blob)
        return blob
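
# Hedged usage sketch of the store wrapper above; the credential values are placeholders.
#   store = AzureBlobStore21({"name": "mystorageaccount", "key": "<storage-key>"})
#   store.create_container("runs")
#   store.append_blob("runs", "run1/log.txt", "run started\n")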
    def upload_azure(self, packet_type, log_file_name):
        if self.db_user_access_token == '' or self.sas_token == '':
            print(
                "Error: Can not upload log to azure since token is empty! Please check the network."
            )

        print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
              log_file_name, ' start.')

        config = get_config()
        account_name = config.AZURE_STORAGE_ACCOUNT
        container_name = config.AZURE_STORAGE_DATA_CONTAINER
        url_name = datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '-' + self.user_id + '-' + log_file_name
        bcreate_blob_ok = False

        error_connection = 'ConnectionError'
        error_authorization = 'AuthenticationFailed'

        while True:
            # get data from data_dict.
            self.data_lock.acquire()
            text = self.data_dict[log_file_name]
            self.data_dict[log_file_name] = ''
            self.data_lock.release()

            # check if user stop logging data.
            self.exit_lock.acquire()
            if self.exit_thread:
                # check for internet and text
                if text == '' or (not self.internet_on()):
                    self.exit_lock.release()
                    break
                else:
                    pass
            self.exit_lock.release()

            # let CPU have a break.
            if text == '':
                time.sleep(1)
                continue

            # create blob on azure
            if not bcreate_blob_ok:
                try:
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                    self.append_blob_service.create_blob(
                        container_name=container_name,
                        blob_name=url_name,
                        content_settings=ContentSettings(
                            content_type='text/plain'))
                    bcreate_blob_ok = True
                    threading.Thread(target=self.save_to_db_task,
                                     args=(packet_type, log_file_name,
                                           url_name)).start()
                except Exception as e:
                    # print('Exception when create_blob:', type(e), e)
                    if error_connection in str(e):
                        pass
                    elif error_authorization in str(e):
                        self.get_sas_token()
                        self.append_blob_service = AppendBlobService(
                            account_name=account_name,
                            sas_token=self.sas_token,
                            protocol='http')
                    print('Retry to create_blob again...')
                    continue

            # append blob on azure
            try:
                # self.append_blob_service.append_blob_from_text(countainerName, fileName, text, progress_callback=self.upload_callback)
                self.append_blob_service.append_blob_from_text(
                    container_name, url_name, text)
            except Exception as e:
                # print('Exception when append_blob:', type(e), e)
                if error_connection in str(e):
                    pass
                elif error_authorization in str(e):
                    self.get_sas_token()
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                # if append_blob failed, do not drop 'text'; push it back into data_dict so it is re-appended next time.
                self.data_lock.acquire()
                self.data_dict[log_file_name] = text + \
                    self.data_dict[log_file_name]
                self.data_lock.release()

        if bcreate_blob_ok:
            # if not self.save_to_ans_platform(packet_type, log_file_name):
            #     print('save_to_ans_platform failed.')
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
                  log_file_name, ' done.')
#from azure.storage.blob import ContentSettings
#block_blob_service.create_blob_from_path(
#    'myseccontainer',
#    'remotesecfiles3.txt',
#    'localfiles3.txt',
#    content_settings=ContentSettings(content_type='text/html')
#            )

#### To list the blobs in a container, use the list_blobs method. This method returns a generator. 
#### The following code outputs the name of each blob in a container to the console.
#generator = block_blob_service.list_blobs('myseccontainer')
#for blob in generator:
#    print(blob.name)

#### The following example demonstrates using get_blob_to_path to download the contents of the myblob blob and store it to the out-sunset.png file.
#block_blob_service.get_blob_to_path('myseccontainer', 'remotesecf.txt', 'fromazure-out.txt')

#### Finally, to delete a blob, call delete_blob.
#block_blob_service.delete_blob('myseccontainer', 'remotesecf.txt')

#### The example below creates a new append blob and appends some data to it, simulating a simple logging operation.
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(myaccount, mykey)
#The same containers can hold all types of blobs
append_blob_service.create_container('mycontainer')
#Append blobs must be created before they are appended to
append_blob_service.create_blob('mycontainer', 'myappendblob')
append_blob_service.append_blob_from_text('mycontainer', 'myappendblob', u'Sinaq, cumle!')
append_blob = append_blob_service.get_blob_to_text('mycontainer', 'myappendblob')
print(append_blob)
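# Note: get_blob_to_text() returns a Blob object; the appended text itself is in append_blob.content.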
class LogWriter(object):
    """description of class"""

    LOG_CONTAINER_NAME = r'log-files'
    DEBUG_MODE = bool(os.getenv('DEBUG_MODE', False))

    # Constructor
    def __init__(self, name, key, subFolderName=None):
        super(LogWriter, self).__init__()

        self._name = name
        self._key = key
        self.m_szLogFileName = ""
        self.m_szSubFolderName = subFolderName
        self.m_pBlobService = AppendBlobService(name, key)

    #}def __init__

    def _CreateLogFile(self):
        """ ログファイルを作成する。WriteLog を呼び出す前に実行すること。 """

        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのためスキップします。")

        try:
            if (0 == len(self.m_szLogFileName)):
                szRet = "create_container"
                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME)
                if bIsExists:
                    pass
                else:
                    self.m_pBlobService.create_container(
                        LogWriter.LOG_CONTAINER_NAME,
                        public_access=PublicAccess.Blob)

                # Determine the log file name
                # // Appended below, but the name is still an empty string because len() was checked to be 0 above
                if ((self.m_szSubFolderName is not None)
                        and (0 < len(self.m_szSubFolderName))):
                    # // If a subfolder name was specified, prepend it
                    self.m_szLogFileName += self.m_szSubFolderName + "\\"
                #}if
                self.m_szLogFileName += r"{0:%Y-%m-%dT%H-%M-%S.log}".format(
                    datetime.datetime.now())

                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                if bIsExists:
                    szRet = "already blob."
                else:
                    szRet = "create_blob"
                    self.m_pBlobService.create_blob(
                        LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                szRet = "OK"
            else:
                szRet = "Already called."
                szRet = "OK"
            #}if

        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
            pass
        return szRet

    #}def

    def WriteLog(self, txt):
        """ ログファイルにテキストを出力する。末尾に改行コードが追加される。 """
        szRet = ""
        szLogText = r"{0:%Y-%m-%d %H:%M:%S}".format(
            datetime.datetime.now()) + r" : " + txt + "\r\n"
        if (LogWriter.DEBUG_MODE):
            print(szLogText)
            return ("Debug モードのためスキップしました。")

        try:
            # Create the log file
            self._CreateLogFile()

            szRet = "append_blob_from_text"
            self.m_pBlobService.append_blob_from_text(
                LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName, szLogText)
            szRet = "OK"
        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
        #try

        return szRet

    #}def

    def WriteBlob(self, blob_name, value):
        """ 単一 BLOB ファイルを作成しテキストを保存する。 """
        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのため書き込みをしません。")

        try:
            #blob_name = r'sample.txt';

            szRet = "BlockBlobService"
            blob_service = BlockBlobService(self._name, self._key)

            szRet = "create_container"
            blob_service.create_container(LogWriter.LOG_CONTAINER_NAME,
                                          public_access=PublicAccess.Blob)

            szRet = "create_blob_from_bytes"
            #blob_service.create_blob_from_bytes(
            #    log_container_name,
            #    log_blob_name,
            #    b'<center><h1>Hello World!</h1></center>',
            #    content_settings=ContentSettings('text/html')
            #)

            if (isinstance(value, str)):
                szRet = "create_blob_from_text"
                blob_service.create_blob_from_text(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, value)
            else:
                szRet = "create_blob_from_stream"
                blob_service.create_blob_from_stream(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, io.BytesIO(value))
            #}if

            #szRet = "make_blob_url"
            #print(blob_service.make_blob_url(log_container_name, log_blob_name))

            szRet = "OK"
        except:
            print(r"Exception.")
        #try

        return szRet

    #def WriteBlob( blob_name, txt ):

    def MakeBlobUri(self, blob_name):
        blob_service = BlockBlobService(self._name, self._key)
        szRet = blob_service.make_blob_url(LogWriter.LOG_CONTAINER_NAME,
                                           blob_name)

        return (szRet)

    #}def


#}class
import os
import io
from azure.storage.blob import BlockBlobService
from azure.storage.blob import AppendBlobService

STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']

LOGS_CONTAINER_NAME = 'logs'
LOGS_ARCHIVE_CONTAINER_NAME = 'logs-archive'

append_blob_service = AppendBlobService(account_name=STORAGE_ACCOUNT_NAME,
                                        account_key=STORAGE_ACCOUNT_KEY)
block_blob_service = BlockBlobService(account_name=STORAGE_ACCOUNT_NAME,
                                      account_key=STORAGE_ACCOUNT_KEY)

if not append_blob_service.exists(LOGS_CONTAINER_NAME):
    exit(0)

if not block_blob_service.exists(LOGS_ARCHIVE_CONTAINER_NAME):
    block_blob_service.create_container(LOGS_ARCHIVE_CONTAINER_NAME)

generator = append_blob_service.list_blobs(LOGS_CONTAINER_NAME)
for blob in generator:
    with io.BytesIO() as stream:
        append_blob_service.get_blob_to_stream(
            container_name=LOGS_CONTAINER_NAME,
            blob_name=blob.name,
            stream=stream,
            max_connections=2)
        stream.seek(0)
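        # The snippet ends here; a hedged guess at the intended next step, given the
        # archive container created above, would be something like:
        #   block_blob_service.create_blob_from_stream(
        #       LOGS_ARCHIVE_CONTAINER_NAME, blob.name, stream)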
import json
import requests
import pprint
from azure.storage.blob import AppendBlobService
from azure.storage.blob import ContentSettings

append_blob_service = AppendBlobService(
    account_name='navview',
    account_key=
    '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
    protocol='http')
append_blob_service.create_blob(
    container_name='data',
    blob_name="data-2018_05_04_13_13_24.csv",
    content_settings=ContentSettings(content_type='text/plain'))
f = open("data/data-2018_05_04_13_13_24.csv", "r")
append_blob_service.append_blob_from_text('data',
                                          "data-2018_05_04_13_13_24.csv",
                                          f.read())
from flask import Flask, request, abort
from azure.storage.blob import AppendBlobService
from datetime import datetime
import requests, json
app = Flask(__name__)

storageService = AppendBlobService(
    connection_string=
    'DefaultEndpointsProtocol=https;AccountName=thingspronotify;AccountKey=axD13Z8R9WkPSi7mcdRUtnPfx9skMYfjf3D/vA92tcs21TOSqcJMHfc4TBvribk4Ed09kNc0EIgct8lJzYRe6w==;EndpointSuffix=core.windows.net'
)


@app.route("/")
def hello():
    writeToAppendBlob('ThingsPro Notify')
    return "ThingsPro Notify"


@app.route("/receiver", methods=['POST'])
def receiver():
    writeToAppendBlob('Receive...')
    try:
        if 'x-amz-sns-message-type' in request.headers:
            AWS_MESSAGE_TYPE = request.headers.get('x-amz-sns-message-type')
            if AWS_MESSAGE_TYPE == 'SubscriptionConfirmation':
                postData = json.loads(request.data)
                subscribeURL = postData['SubscribeURL']
                writeToAppendBlob('SubscribeURL:' + subscribeURL)
                response = requests.get(subscribeURL)
                writeToAppendBlob(response.text)
                return 'OK'
class RoverLogApp(rover_application_base.RoverApplicationBase):
    def __init__(self, user=False):
        '''Initialize and create a CSV file
        '''

        if user and list(user.keys())[0] == 'startLog':
            self.username = user['startLog']['username']
            self.userId = user['startLog']['id']
            self.userFilename = user['startLog']['fileName']
            self.userAccessToken = user['startLog']['access_token']

        self.start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        if not self.load_configuration():
            os._exit(1)
        if not os.path.exists('data/'):
            os.mkdir('data/')
        self.output_packets = self.rover_properties['userMessages'][
            'outputPackets']
        self.first_row = {}
        self.log_file_names = {}
        self.log_files = {}

        if user:
            with open('tempLogFiles.json', 'r') as outfile:
                data = json.load(outfile)
                self.log_file_names = data

        if not user:
            with open('tempLogFiles.json', 'w') as outfile:
                json.dump({}, outfile)
            try:
                for packet in self.output_packets:
                    self.first_row[packet['name']] = 0
                    self.log_file_names[packet['name']] = packet[
                        'name'] + '-' + self.start_time + '.csv'
                    self.log_files[packet['name']] = open(
                        'data/' + self.log_file_names[packet['name']],
                        'w')  # just log Compact Navigation Message

                    entry = {
                        packet['name']: self.log_file_names[packet['name']]
                    }

                    with open('tempLogFiles.json') as f:
                        data = json.load(f)

                    data.update(entry)
                    with open('tempLogFiles.json', 'w') as f:
                        json.dump(data, f)
            except:
                pass

        if user and list(user.keys())[0] == 'startLog':
            self.savetoAnsPlatform()

        if user and list(user.keys())[0] == 'stopLog':
            time.sleep(10)
            self.close()
            # os.remove("tempLogFiles.json")

    def on_reinit(self):
        print("RoverLogApp.on_reinit()")
        pass
        # Is it necessary to create a new log file when the serial connector is replugged?

        # self.start_time = datetime.datetime.now().strftime('%Y%m%d_%H_%M_%S')
        # try:
        #     for packet in self.output_packets:
        #         self.first_row[packet['name']] = 0
        #         self.log_file_names[packet['name']] = packet['name'] +'-' + self.start_time + '.csv'
        #         self.log_files[packet['name']] = open('data/' + self.log_file_names[packet['name']], 'w')# just log Compact Navigation Message
        # except:
        #     pass

    def on_find_active_rover(self):
        print("RoverLogApp.on_find_active_rover()")

    def on_message(self, *args):
        packet_type = args[0]
        self.data = args[1]
        is_var_len_frame = args[2]
        if is_var_len_frame:
            self.log_var_len(self.data, packet_type)
        else:
            self.log(self.data, packet_type)

    def on_exit(self):
        pass

    def load_configuration(self):
        '''
        load properties from 'rover.json'
        returns: True when load successfully.
                 False when load failed.
        '''
        try:
            with open('setting/rover.json') as json_data:
                self.rover_properties = json.load(json_data)
            return True
        # except (ValueError, KeyError, TypeError) as error:
        except Exception as e:
            print(e)
            return False

    def log(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        if not self.rover_properties:
            return

        output_packet = next(
            (x for x in self.rover_properties['userMessages']['outputPackets']
             if x['name'] == packet_type), None)
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row[packet_type]:
            self.first_row[packet_type] = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            keyIdx = -1
            for key in data:
                keyIdx = keyIdx + 1
                '''dataStr = output_packet['payload'][keyIdx]['name'] + \
                          ' [' + \
                          output_packet['payload'][keyIdx]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][keyIdx]['name']
                unitStr = output_packet['payload'][keyIdx]['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s}({1:s}),'.format(dataStr, unitStr)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row[packet_type] += 1
            header = ''

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        keyIdx = -1
        for key in data:
            keyIdx = keyIdx + 1
            outputPcktType = output_packet['payload'][keyIdx]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'double':
                # double
                str += '{0:15.12f},'.format(data[key])
            elif outputPcktType == 'float':
                # print(3) #key + str(2))
                str += '{0:12.8f},'.format(data[key])
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'uchar' or outputPcktType == 'char':
                # character
                str += '{:},'.format(data[key])
            else:
                # unknown
                str += '{0:3.5f},'.format(data[key])
        #
        str = str[:-1]
        str = str + '\n'
        self.log_files[packet_type].write(header + str)

    def log_var_len(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        if not self.rover_properties:
            return

        output_packet = next(
            (x for x in self.rover_properties['userMessages']['outputPackets']
             if x['name'] == packet_type), None)
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row[packet_type]:
            self.first_row[packet_type] = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            for value in output_packet['payload']:
                dataStr = value['name']
                unitStr = value['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s}({1:s}),'.format(dataStr, unitStr)
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row[packet_type] += 1
            header = ''

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        const_str = ''
        var_str = ''
        var_fileld_tpyes = []
        var_fileld_num = len(
            output_packet['payload']) - output_packet['var_num']['field_idx']
        const_fileld_num = len(output_packet['payload']) - var_fileld_num
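        # The first const_fileld_num payload entries (up to var_num.field_idx) are fixed fields;
        # the remaining entries describe one repeating group written once per variable-length record.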

        for idx, value in enumerate(output_packet['payload']):
            if idx >= const_fileld_num:
                var_fileld_tpyes.append(value['type'])

        for idx, key in enumerate(data):
            if idx < const_fileld_num:
                outputPcktType = output_packet['payload'][idx]['type']

                if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                outputPcktType == 'uint64' or outputPcktType == 'int64':
                    # integers and unsigned integers
                    const_str += '{0:d},'.format(list(key.values())[0])
                elif outputPcktType == 'double':
                    # double
                    const_str += '{0:15.12f},'.format(list(key.values())[0])
                elif outputPcktType == 'float':
                    # print(3) #key + str(2))
                    const_str += '{0:12.8f},'.format(list(key.values())[0])
                elif outputPcktType == 'uint8':
                    # byte
                    const_str += '{0:d},'.format(list(key.values())[0])
                elif outputPcktType == 'uchar' or outputPcktType == 'char':
                    # character
                    const_str += '{:},'.format(list(key.values())[0])
                else:
                    # unknown
                    const_str += '{0:3.5f},'.format(key.values()[0])
            else:
                idx_key = -1
                for k, v in key.items():
                    idx_key += 1
                    outputPcktType = var_fileld_tpyes[idx_key]
                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        var_str += '{0:15.12f},'.format(v)
                    elif outputPcktType == 'float':
                        # print(3) #key + str(2))
                        var_str += '{0:12.8f},'.format(v)
                    elif outputPcktType == 'uint8':
                        # byte
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char':
                        # character
                        var_str += '{:},'.format(v)
                    else:
                        # unknown
                        var_str += '{0:3.5f},'.format(v)

                str = const_str + var_str
                str = str[:-1]
                str = str + '\n'
                self.log_files[packet_type].write(header + str)
                header = ''
                str = ''
                var_str = ''

    ''' Upload CSVs to the Azure container.
    '''

    def uploadtoAzure(self, fileDisplayName):

        # f = open("data/" + self.user['fileName'], "r")
        f = open("data/" + fileDisplayName, "r")
        text = f.read()
        account_key = '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg=='

        try:
            self.azureStorage('navview', account_key, 'data', fileDisplayName,
                              text)
        except:
            # Try again!
            self.azureStorage('navview', account_key, 'data', fileDisplayName,
                              text)
        ''' Trigger Database upload
        '''
        # self.savetoAnsPlatform()

    def azureStorage(self, accountName, accountkey, countainerName, fileName,
                     text):
        self.append_blob_service = AppendBlobService(account_name=accountName,
                                                     account_key=accountkey,
                                                     protocol='http')
        self.append_blob_service.create_blob(
            container_name=countainerName,
            blob_name=fileName,
            content_settings=ContentSettings(content_type='text/plain'))
        self.append_blob_service.append_blob_from_text(countainerName,
                                                       fileName, text)

    ''' Upload CSV related information to the database.
    '''

    def savetoAnsPlatform(self):
        for files in self.log_file_names:
            fileDisplayName = files + "-" + self.userFilename + ".csv"

            data = {
                "pn": '1.0.0',
                "sn": 'rtk',
                "fileName": fileDisplayName,
                "url": self.log_file_names[files],
                "imuProperties": json.dumps(self.rover_properties),
                "sampleRate": '100',
                "packetType": files,
                "userId": self.userId
            }

            url = "https://api.aceinna.com/api/datafiles/replaceOrCreate"
            data_json = json.dumps(data)
            headers = {
                'Content-type': 'application/json',
                'Authorization': self.userAccessToken
            }
            response = requests.post(url, data=data_json, headers=headers)
            response = response.json()

    # def close(self,fileName,storedFile):
    #     time.sleep(0.1)
    #     # if self.ws:
    #     storedFile.close()
    #     threading.Thread(target=self.uploadtoAzure(fileName)).start()
    #     # else:
    #     #     self.file.close()
    #     # print('close')
    #     # try:
    #     #     for packet in self.output_packets:
    #     #         self.log_files[packet['name']].close()
    #     #         threading.Thread(target=self.write_to_azurelog_files[packet['name']]).start()
    #     # except:
    #     #     pass

    def close(self):
        for files in self.log_file_names:
            self.uploadtoAzure(self.log_file_names[files])
class LogIMU380Data:
    def __init__(self, imu, user):
        '''Initialize and create a CSV file
        '''
        self.name = 'data-' + datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '.csv'
        self.file = open('data/' + self.name, 'w')
        self.first_row = 0
        # decode converts out of byte array
        self.sn = imu.device_id.split(" ")[0]
        self.pn = imu.device_id.split(" ")[1]
        self.device_id = imu.device_id
        self.odr_setting = imu.odr_setting
        self.packet_type = imu.packet_type
        # store the user info and device properties; record_to_ansplatform() below relies on them
        self.imu_properties = imu.imu_properties
        self.user = user
        odr_rates = {0: 'Quiet', 1: '100Hz', 2: '50Hz', 4: '25Hz'}
        self.sample_rate = odr_rates[self.odr_setting]

    def log(self, data, odr_setting):
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row:
            self.first_row = 1
            labels = ''.join('{0:s},'.format(key) for key in data)
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row += 1
            header = ''

        str = ''
        for key in data:
            if key == 'BITstatus' or key == 'GPSITOW' or key == 'counter' or key == 'timeITOW':
                str += '{0:d},'.format(data[key])
            else:
                str += '{0:3.5f},'.format(data[key])
        str = str[:-1]
        str = str + '\n'
        self.file.write(header + str)

    def write_to_azure(self):
        # check for internet
        # if not self.internet_on():
        #    return False

        # record file to cloud
        self.append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        self.append_blob_service.create_blob(
            container_name='data',
            blob_name=self.name,
            content_settings=ContentSettings(content_type='text/plain'))
        f = open("data/" + self.name, "r")
        self.append_blob_service.append_blob_from_text('data', self.name,
                                                       f.read())

        # TODO: check if success

        # record record to ansplatform
        self.record_to_ansplatform()

    def record_to_ansplatform(self):
        data = {
            "pn": self.pn,
            "sn": self.sn,
            "fileName": self.user['fileName'],
            "url": self.name,
            "imuProperties": json.dumps(self.imu_properties),
            "sampleRate": self.sample_rate,
            "packetType": self.packet_type,
            "userId": self.user['id']
        }
        url = "https://ans-platform.azurewebsites.net/api/datafiles/replaceOrCreate"
        data_json = json.dumps(data)
        headers = {
            'Content-type': 'application/json',
            'Authorization': self.user['access_token']
        }
        response = requests.post(url, data=data_json, headers=headers)
        response = response.json()
        print(response)

        # clean up
        self.file.close()
        self.name = ''

        return  #ends thread

    def internet_on(self):
        try:
            urllib2.urlopen('https://ans-platform.azurewebsites.net',
                            timeout=1)
            return True
        except urllib2.URLError as err:
            return False

    def close(self):
        time.sleep(0.1)
        threading.Thread(target=self.write_to_azure).start()
class OpenIMULog:
    
    def __init__(self, imu, user = False):
        '''Initialize and create a CSV file
        '''

        self.name = 'data-' + datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S') + '.csv'
        if user:
            self.user = user
            if self.user['fileName'] == '':
                self.user['fileName'] = self.name
            else:
                self.user['fileName'] += '.csv'
            self.file = open('data/' + self.user['fileName'], 'w')
        else:
            self.file = open('data/' + self.name, 'w')
        self.first_row = 0
        # decode converts out of byte array
        self.ws = imu.ws
        self.sn = imu.device_id.split(" ")[0]
        self.pn = imu.device_id.split(" ")[1]
        self.device_id = imu.device_id
        self.odr_setting = imu.odr_setting
        self.packet_type = imu.packet_type
        self.imu_properties = imu.imu_properties

    # Parse the data, read in from the unit, and generate a data file using
    #   the json properties file to create a header and specify the precision
    #   of the data in the resulting data file.
    def log(self, imu, data):
        #
        output_packet = next((x for x in imu.imu_properties['userMessages']['outputPackets'] if x['name'] == imu.packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if not self.first_row:
            self.first_row = 1

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            keyIdx = -1
            for key in data:
                keyIdx= keyIdx + 1
                '''dataStr = output_packet['payload'][keyIdx]['name'] + \
                          ' [' + \
                          output_packet['payload'][keyIdx]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][keyIdx]['name']
                labels = labels + '{0:s},'.format(dataStr)
            
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            self.first_row += 1
            header = ''


        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        keyIdx = -1
        for key in data:
            keyIdx= keyIdx + 1
            outputPcktType = output_packet['payload'][keyIdx]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'double':
                # double
                str += '{0:15.12f},'.format(data[key])
            elif outputPcktType == 'float':
                # print(3) #key + str(2))
                str += '{0:12.8f},'.format(data[key])
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(data[key])
            elif outputPcktType == 'uchar' or outputPcktType == 'char':
                # character
                str += '{:},'.format(data[key])
            else:
                # unknown
                print(0)
                str += '{0:3.5f},'.format(data[key])

        # 
        str = str[:-1]
        str = str + '\n'
        self.file.write(header+str)

    def write_to_azure(self):
        # check for internet 
        # if not self.internet_on(): 
        #    return False

        # record file to cloud
        # f = open("data/" + self.name,"r")
        f = open("data/" + self.user['fileName'], "r")
        text = f.read()
        try: 
            self.append_blob_service = AppendBlobService(account_name='navview', account_key='+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==', protocol='http')
            self.append_blob_service.create_blob(container_name='data', blob_name=self.name,  content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text('data',self.name, text)
        except:
            # Try again!
            print('trying to write again due to exception')
            self.append_blob_service = AppendBlobService(account_name='navview', account_key='+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==', protocol='http')
            self.append_blob_service.create_blob(container_name='data', blob_name=self.name,  content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text('data',self.name, text)


        # record the file metadata to ansplatform
        self.record_to_ansplatform()
        
        
    def record_to_ansplatform(self):
        data = { "pn" : self.pn, "sn": self.sn, "fileName" : self.user['fileName'],  "url" : self.name, "imuProperties" : json.dumps(self.imu_properties),
                 "sampleRate" : self.odr_setting, "packetType" : self.packet_type, "userId" : self.user['id'] }
        url = "https://api.aceinna.com/api/datafiles/replaceOrCreate"
        data_json = json.dumps(data)
        headers = {'Content-type': 'application/json', 'Authorization' : self.user['access_token'] }
        response = requests.post(url, data=data_json, headers=headers)
        response = response.json()
       
        # clean up
        self.name = ''

        return  #ends thread

    def internet_on(self):
        try:
            urllib2.urlopen('https://ans-platform.azurewebsites.net', timeout=1)
            return True
        except urllib2.URLError as err: 
            return False

    def close(self):
        time.sleep(0.1)
        if self.ws:
            self.file.close()
            threading.Thread(target=self.write_to_azure).start()
        else:
            self.file.close()
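
# --- Usage sketch (added for clarity; not part of the original example) ---
# OpenIMULog expects a connected IMU driver object; the stub below only mimics
# the attributes the class reads (ws, device_id, odr_setting, packet_type,
# imu_properties). All names and values here are hypothetical placeholders.
if __name__ == '__main__':
    import os

    class _FakeIMU(object):
        ws = False                              # skip the Azure upload path in close()
        device_id = '12345 OpenIMU300ZI'        # "<sn> <pn>"
        odr_setting = 1
        packet_type = 'z1'
        imu_properties = {'userMessages': {'outputPackets': [
            {'name': 'z1', 'payload': [
                {'name': 'xAccel', 'type': 'float', 'unit': 'g'},
            ]}
        ]}}

    if not os.path.exists('data'):
        os.mkdir('data')
    log = OpenIMULog(_FakeIMU())                # creates data/data-<timestamp>.csv
    log.log(_FakeIMU(), {'xAccel': 0.01})       # writes the header row plus one data row
    log.close()                                 # ws is False, so this only closes the file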
Example #23
from sys import stderr
from werkzeug.wrappers import PlainRequest, Response

__all__ = ['application']

load_dotenv()

rootLogger = getLogger()
rootLogger.setLevel('INFO')
rootLogger.addHandler(StreamHandler(stderr))
logger = getLogger(__name__)

connection_string = environ['AZURE_STORAGE_CONNECTION_STRING']
container_name = environ['AZURE_STORAGE_CONTAINER_NAME']

append_blob_service = AppendBlobService(connection_string=connection_string)


@PlainRequest.application
def application(request):
    '''
    :param PlainRequest request:
    '''
    try:
        if request.method == 'GET' and request.path == '/healthz':
            append_blob_service.get_container_properties(
                container_name=container_name)
            return Response(status=204)

        elif request.method == 'POST' and request.path == '/':
            try:
Example #24
def main(msg: func.QueueMessage) -> None:
    # consume the video name from the event that was put on blob storage, using the filename

    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    raw = msg.get_body().decode('utf-8')
    logging.info(raw)
    eventVideo = json.loads(raw)

    blockBlobService = BlockBlobService(
        account_name=os.environ['remoteStorageAccountName'],
        account_key=os.environ['remoteStorageAccountKey'])
    appendBlobService = AppendBlobService(
        account_name=os.environ['remoteStorageAccountName'],
        account_key=os.environ['remoteStorageAccountKey'])

    cap = getVideo(blockBlobService, eventVideo)
    folder = eventVideo["filename"]

    # process video
    x = 1
    frameRate = cap.get(5)  #frame rate
    numberOfPicturesPerSecond = int(os.environ["numberOfPicturesPerSecond"])
    # start creating frames from video
    while (cap.isOpened()):
        frameId = cap.get(1)  #current frame number
        ret, frame = cap.read()
        if (ret != True):
            break

        # when the frame number is a multiple of the sampling interval, create an image
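        # e.g. (hypothetical numbers): at 25 fps with numberOfPicturesPerSecond = 5,
        # floor(25 / 5) = 5, so every 5th frame (frameId % 5 == 0) becomes an image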
        if frameId % math.floor(frameRate / numberOfPicturesPerSecond) == 0:
            logging.info("create cap" + str(x))
            # convert frame to PIL image
            frame_conv = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            pilImage = Image.fromarray(frame_conv)
            imgByteArr = BytesIO()
            pilImage.save(imgByteArr, format='PNG')
            imgByteArr = imgByteArr.getvalue()

            # write image to blob for logging
            imageFileName = folder + "/log/image" + str(int(x)) + "_img.png"
            blockBlobService.create_blob_from_bytes(
                os.environ['remoteStorageOutputContainer'], imageFileName,
                imgByteArr)

            # try to recognize objects and text from image
            description, caption, confidence, text, fullText = analyzeImage(
                imgByteArr)
            # in case the train number is recognized, publish to Power BI
            trainNumber = ""
            if (len(text) == 4 and text.isdigit()) or text[:2] == "NL":
                trainNumber = text
                publishPowerBI(blockBlobService, x, imgByteArr, folder,
                               eventVideo, text, caption)

            # write identification of image to csv
            loggingCsv = "\n" + str(imageFileName) + "|" + str(
                caption) + "|" + str(confidence) + "|" + str(
                    json.dumps(text).replace(
                        '|', ':pipe')) + "|" + str(trainNumber) + "|" + str(
                            json.dumps(fullText).replace('|', ':pipe'))
            appendBlobService.append_blob_from_text("logging",
                                                    os.environ["loggingcsv"],
                                                    loggingCsv)

            # increment image
            x += 1
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')

    # For secondary
    default_storage_account_name_secondary = settings["DEFAULT_STORAGE_ACCOUNT_NAME_SECONDARY"]
    default_storage_access_key_secondary = settings["DEFAULT_STORAGE_ACCESS_KEY_SECONDARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name_secondary, account_key=default_storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name_secondary, account_key=default_storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')


    # Prepare primary premium storage account
    storage_account_name_primary = settings["STORAGE_ACCOUNT_NAME_PRIMARY"]
    storage_access_key_primary = settings["STORAGE_ACCESS_KEY_PRIMARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=storage_account_name_primary, account_key=storage_access_key_primary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container('stemcell')

    # Prepare secondary premium storage account
    storage_account_name_secondary = settings["STORAGE_ACCOUNT_NAME_SECONDARY"]
    storage_access_key_secondary = settings["STORAGE_ACCESS_KEY_SECONDARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=storage_account_name_secondary, account_key=storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container('stemcell')
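
# --- Usage sketch (added; not part of the original example) ---
# prepare_storage expects a settings dict with the keys referenced above. The
# account names and keys below are placeholders, not real credentials; running
# this against Azure requires real storage accounts and access keys.
if __name__ == '__main__':
    sample_settings = {
        "DEFAULT_STORAGE_ACCOUNT_NAME": "mydefaultaccount",
        "STORAGE_ACCESS_KEY": "<default-account-key>",
        "SERVICE_HOST_BASE": "core.windows.net",
        "DEFAULT_STORAGE_ACCOUNT_NAME_SECONDARY": "mydefaultaccount2",
        "DEFAULT_STORAGE_ACCESS_KEY_SECONDARY": "<secondary-default-key>",
        "STORAGE_ACCOUNT_NAME_PRIMARY": "mypremiumaccount1",
        "STORAGE_ACCESS_KEY_PRIMARY": "<primary-premium-key>",
        "STORAGE_ACCOUNT_NAME_SECONDARY": "mypremiumaccount2",
        "STORAGE_ACCESS_KEY_SECONDARY": "<secondary-premium-key>",
    }
    prepare_storage(sample_settings)            # creates the containers and tables above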
Example #26
pool_sema = threading.BoundedSemaphore(value=2 * num_cores)

# load custom filter from file
config = json.load(open(sys.argv[1]))

stats_endpoint_request = config["stats_endpoint_request"]
item_types_files = config["item_types_assets"]

planet_api_key = os.environ['PL_API_KEY']

if "blob_storage" in config:
    is_cloud_storage = True
    azure_blob_storage_account_name = os.environ['AZURE_BS_ACC_NAME']
    azure_blob_storage_API_KEY = os.environ['AZURE_BS_API_KEY']
    container_name = config["blob_storage"]["container_name"]
    append_blob_service = AppendBlobService(account_name=azure_blob_storage_account_name,\
                                          account_key=azure_blob_storage_API_KEY)
elif "download_directory" in config:
    is_cloud_storage = False
    download_directory = config["download_directory"]
else:
    raise Exception('No storage device given')

# init the session object
session = requests.Session()
# get the Planet API key from the environment variable
session.auth = (planet_api_key, '')


# What we want to do with each page of search results
def handle_page(page):
    for feature in page["features"]:
Example #27
class FileLoger():
    def __init__(self, device_properties):
        '''Initialize and create a CSV file
        '''
        start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        self.device_properties = device_properties
        if not self.device_properties:
            print('No properties found')
            os._exit(1)

        self.root_folder = os.path.join(resource.get_executor_path(), r'data')
        if not os.path.exists(self.root_folder):
            os.mkdir(self.root_folder)
        self.output_packets = self.device_properties['userMessages'][
            'outputPackets']
        self.log_file_rows = {}
        self.log_file_names = {}
        self.log_files_obj = {}
        self.log_files = {}
        self.user_file_name = ''  # the prefix of log file name.
        self.msgs_need_to_log = []
        self.ws = False
        # azure app.
        self.user_id = ''
        self.file_name = ''
        self.sas_token = ''
        self.db_user_access_token = ''
        # 'http://40.118.233.18:3000/'  # TODO: set a host url
        self.host_url = get_config().ANS_PLATFORM_URL

        #
        self.threads = []  # threads of receiver and parser
        self.exit_thread = False  # flag of exit threads
        self.exit_lock = threading.Lock()  # lock of exit_thread
        self.data_dict = {}  # data container
        self.data_lock = threading.Lock()  # lock of data_queue

        self.device_log_info = None
        self.ans_platform = AnsPlatformAPI()

    def start_user_log(self, file_name='', ws=False):
        '''
        start log.
        return:
                0: OK
                1: exception that has started logging already.
                2: other exception.
        '''
        try:
            if len(self.log_file_rows) > 0:
                return 1  # has started logging already.

            self.ws = ws
            self.exit_thread = False
            self.user_file_name = file_name
            start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
            current_path = os.path.join(self.root_folder, start_time)
            if not os.path.exists(current_path):
                os.mkdir(current_path)

            for packet in self.output_packets:
                # if 1 == packet['save2file']:
                has_save2file = packet.__contains__('save2file')
                save2file = 1
                if has_save2file:
                    save2file = packet['save2file']

                if save2file == 1:
                    self.msgs_need_to_log.append(packet['name'])

                self.log_file_rows[packet['name']] = 0
                if self.user_file_name == '':
                    self.log_file_names[
                        packet['name']] = packet['name'] + '.csv'
                else:
                    self.log_file_names[packet['name']] = self.user_file_name + \
                        '_' + packet['name'] + '.csv'
                self.log_files[packet['name']] = self.log_file_names[
                    packet['name']]

                self.log_files_obj[packet['name']] = open(
                    current_path + '/' + self.log_file_names[packet['name']],
                    'w')

            if self.ws:
                self.get_sas_token()
                self.data_dict.clear()
                for i, (k, v) in enumerate(self.log_files.items()
                                           ):  # k:pack type  v:log file name
                    self.data_dict[v] = ''
                    threading.Thread(target=self.upload_azure,
                                     args=(k, v)).start()
            return 0
        except Exception as e:
            print('Exception! File:[{0}], Line:[{1}]. Exception:{2}'.format(
                __file__,
                sys._getframe().f_lineno, e))
            return 2

    def stop_user_log(self):
        '''
        stop log.
        return:
                0: OK
                1: exception that driver hasn't started logging files yet.
                2: other exception.
        '''
        rev = 0
        try:
            if len(self.log_file_rows) == 0:
                return 1  # driver hasn't started logging files yet.
            for i, (k, v) in enumerate(self.log_files_obj.items()):
                v.close()
            self.log_file_rows.clear()
            self.log_file_names.clear()
            self.log_files_obj.clear()
            rev = 0
        except Exception as e:
            print(e)
            rev = 2

        if self.ws:
            time.sleep(1)
            self.exit_lock.acquire()
            self.exit_thread = True
            self.exit_lock.release()
            self.ws = False

        return rev

    def upload_azure(self, packet_type, log_file_name):
        if self.db_user_access_token == '' or self.sas_token == '':
            print(
                "Error: Can not upload log to azure since token is empty! Please check the network."
            )

        print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
              log_file_name, ' start.')

        config = get_config()
        account_name = config.AZURE_STORAGE_ACCOUNT
        container_name = config.AZURE_STORAGE_DATA_CONTAINER
        url_name = datetime.datetime.now().strftime(
            '%Y_%m_%d_%H_%M_%S') + '-' + self.user_id + '-' + log_file_name
        bcreate_blob_ok = False

        error_connection = 'ConnectionError'
        error_authorization = 'AuthenticationFailed'

        while True:
            # get data from data_dict.
            self.data_lock.acquire()
            text = self.data_dict[log_file_name]
            self.data_dict[log_file_name] = ''
            self.data_lock.release()

            # check if user stop logging data.
            self.exit_lock.acquire()
            if self.exit_thread:
                # check for internet and text
                if text == '' or (not self.internet_on()):
                    self.exit_lock.release()
                    break
                else:
                    pass
            self.exit_lock.release()

            # let CPU have a break.
            if text == '':
                time.sleep(1)
                continue

            # create blob on azure
            if not bcreate_blob_ok:
                try:
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                    self.append_blob_service.create_blob(
                        container_name=container_name,
                        blob_name=url_name,
                        content_settings=ContentSettings(
                            content_type='text/plain'))
                    bcreate_blob_ok = True
                    threading.Thread(target=self.save_to_db_task,
                                     args=(packet_type, log_file_name,
                                           url_name)).start()
                except Exception as e:
                    # print('Exception when create_blob:', type(e), e)
                    if error_connection in str(e):
                        pass
                    elif error_authorization in str(e):
                        self.get_sas_token()
                        self.append_blob_service = AppendBlobService(
                            account_name=account_name,
                            sas_token=self.sas_token,
                            protocol='http')
                    print('Retry to create_blob again...')
                    continue

            # append blob on azure
            try:
                # self.append_blob_service.append_blob_from_text(countainerName, fileName, text, progress_callback=self.upload_callback)
                self.append_blob_service.append_blob_from_text(
                    container_name, url_name, text)
            except Exception as e:
                # print('Exception when append_blob:', type(e), e)
                if error_connection in str(e):
                    pass
                elif error_authorization in str(e):
                    self.get_sas_token()
                    self.append_blob_service = AppendBlobService(
                        account_name=account_name,
                        sas_token=self.sas_token,
                        protocol='http')
                    # if append blob failed, do not drop 'text', but push 'text' to data_dict and re-append next time.
                    self.data_lock.acquire()
                    self.data_dict[log_file_name] = text + \
                        self.data_dict[log_file_name]
                    self.data_lock.release()

        if bcreate_blob_ok:
            # if not self.save_to_ans_platform(packet_type, log_file_name):
            #     print('save_to_ans_platform failed.')
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'),
                  log_file_name, ' done.')

    def save_to_db_task(self, packet_type, file_name, url_name):
        if not self.save_to_ans_platform(packet_type, file_name, url_name):
            print('save_to_ans_platform failed.')

    def append(self, packet_type, packet):
        if len(self.log_file_rows) == 0:  # if hasn't started logging.
            return

        if packet_type in self.msgs_need_to_log:
            self.log(packet_type, packet)

    def get_log_file_names(self):
        return self.log_file_names.copy()

    def log(self, packet_type, data):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next(
            (x for x in self.output_packets if x['name'] == packet_type), None)

        fields = [field['name'] for field in output_packet['payload']]
        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:
            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            # for key in data:
            for i, (k, v) in enumerate(data.items()):
                '''dataStr = output_packet['payload'][i]['name'] + \
                          ' [' + \
                          output_packet['payload'][i]['unit'] + \
                          ']'''
                if not fields.__contains__(k):
                    continue
                data_str = output_packet['payload'][i]['name']
                unit_str = output_packet['payload'][i]['unit']
                if unit_str == '':
                    labels = labels + '{0:s},'.format(data_str)
                else:
                    labels = labels + \
                        '{0:s} ({1:s}),'.format(data_str, unit_str)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        for i, (k, v) in enumerate(data.items()):
            if not fields.__contains__(k):
                continue
            output_packet_type = output_packet['payload'][i]['type']

            if output_packet['payload'][i].__contains__('scaling'):
                str += '{0},'.format(v)
            else:
                if output_packet_type == 'uint32' or output_packet_type == 'int32' or \
                        output_packet_type == 'uint16' or output_packet_type == 'int16' or \
                        output_packet_type == 'uint64' or output_packet_type == 'int64':
                    # integers and unsigned integers
                    str += '{0:d},'.format(v)
                elif output_packet_type == 'double':
                    # double
                    str += '{0:0.8f},'.format(v)  # 15.12
                elif output_packet_type == 'float':
                    str += '{0:0.4f},'.format(v)  # 12.8
                elif output_packet_type == 'uint8':
                    # byte
                    str += '{0:d},'.format(v)
                elif output_packet_type == 'uchar' or output_packet_type == 'char' or output_packet_type == 'string':
                    # character
                    str += '{:},'.format(v)
                else:
                    # unknown
                    str += '{0:3.5f},'.format(v)
        #
        str = header + str[:-1] + '\n'

        self.log_files_obj[packet_type].write(str)
        self.log_files_obj[packet_type].flush()

        if self.ws:
            self.data_lock.acquire()
            self.data_dict[self.log_files[packet_type]] = self.data_dict[
                self.log_files[packet_type]] + str
            self.data_lock.release()

    def set_info(self, info):
        self.device_log_info = info
        pass

    def set_user_id(self, user_id):
        self.user_id = user_id
        if not isinstance(self.user_id, str):
            self.user_id = str(self.user_id)

    def set_user_access_token(self, access_token):
        self.db_user_access_token = access_token

    def get_sas_token(self):
        try:
            self.ans_platform.set_access_token(self.db_user_access_token)
            self.sas_token = self.ans_platform.get_sas_token()
        except Exception as e:
            self.sas_token = ''
            print('Exception when get_sas_token:', e)

    def save_to_ans_platform(self, packet_type, file_name, url_name):
        ''' Upload CSV related information to the database.
        '''
        if not self.device_log_info:
            return False

        try:
            self.device_log_info['fileName'] = file_name
            self.device_log_info['url'] = url_name
            self.device_log_info['userId'] = self.user_id
            self.device_log_info['logInfo']['packetType'] = packet_type
            data = self.device_log_info

            # data = {
            #     "type": self.device_log_info['type'],
            #     "model": self.device_log_info['name'],
            #     "fileName": file_name,
            #     "url": file_name,
            #     "userId": self.user_id,
            #     "logInfo": {
            #             "pn": self.device_log_info['pn'],
            #             "sn": self.device_log_info['sn'],
            #             "packetType": packet_type,
            #             "insProperties": json.dumps(self.device_properties)
            #     }
            # }

            self.ans_platform.set_access_token(self.db_user_access_token)
            return self.ans_platform.save_record_log(data)
        except Exception as e:
            print('Exception when update db:', e)

    def internet_on(self):
        try:
            url = 'https://navview.blob.core.windows.net/'
            if sys.version_info[0] > 2:
                import urllib.request
                response = urllib.request.urlopen(url, timeout=1)
            else:
                import urllib2
                response = urllib2.urlopen(url, timeout=1)
            # print(response.read())
            return True
        except Exception:
            # urllib2.URLError only exists on the Python 2 path above, so catch broadly here
            return False
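
# --- Usage sketch (added; not part of the original example) ---
# Hypothetical local-only flow (ws=False, so nothing is uploaded to Azure).
# `device_properties` must provide userMessages.outputPackets as used above,
# and the class also relies on the surrounding app's resource/get_config helpers.
#
# file_loger = FileLoger(device_properties)
# file_loger.start_user_log('my_run')                  # opens data/<timestamp>/my_run_<packet>.csv files
# file_loger.append('z1', {'time': 1, 'xAccel': 0.01}) # logs one packet if 'z1' is marked save2file
# file_loger.stop_user_log()                           # closes the CSV files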
Example #28
# Usage: Call python3 controller.py X, where X is the number of SLURM
# jobs you want SLURM to spawn on the SLURM nodes

import csv
import sys
import subprocess
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(account_name='storage_account_name',
                                        account_key='storage_account_key')

# Creates an append blob for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
Example #29
def TryParseCursor(cursor):
    try:
        return list(int(s) for s in cursor.split('.', 2))
    except Exception:
        logger.exception('Failed to parse cursor %s', cursor)
        return None
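
# e.g. (hypothetical input): TryParseCursor('3.128') returns [3, 128];
# malformed cursors fall through to the except branch above and return None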


if config.get("logging") == 'azure_blob':
    logger.info('Azure Blob log backend is enabled.')

    from azure.storage.blob import AppendBlobService
    from azure.common import AzureHttpError

    append_blob_service = AppendBlobService(
        connection_string=config['azure_blob_log']['connection_string'])
    container_name = config['azure_blob_log']['container_name']

    CHUNK_SIZE = 1024 * 1024  # Assume each line in log is no more than 1 MB

    def GetJobLog(jobId, cursor=None, size=None):
        try:
            prefix = 'jobs.' + jobId

            lines = []

            try:
                blobs = append_blob_service.list_blobs(
                    container_name=container_name, prefix=prefix)
                blobs = list(blobs)
class Results(object):
    """
    Handles interacting with encrypted results in blob storage.
    """
    def __init__(self, logger, redisHost, redisPort):
        """
        Initializes a new instance of the Results class.

        :param logger logger: The logger instance to use for logging
        :param str redisHost: Redis host where the Redis Q is running
        :param int redisPort: Redis port where the Redis Q is running
        """
        self.logger = logger
        self.config = Config()
        self.redis_host = redisHost
        self.redis_port = redisPort
        # create an instance of AESCipher to use for encryption
        aesHelper = AESHelper(self.config)
        self.aescipher = aesHelper.create_aescipher_from_config()
        if (self.init_storage_services() is False):
            raise Exception(
                "Errors occurred instantiating results storage service.")

    def init_storage_services(self):
        """
        Initializes the storage service clients using values from config.py.
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # creates instance of BlockBlobService and AppendBlobService to use for completed results storage
            self.storage_service = BlockBlobService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_container_sas_token)
            self.append_storage_service = AppendBlobService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_container_sas_token)
            self.storage_service.create_container(
                self.config.results_container_name)

            # creates instances of Azure QueueService
            self.job_status_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.job_status_queue_sas_token)
            self.job_status_queue_service.encode_function = models.QueueMessageFormat.noencode
            self.results_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_queue_sas_token)
            self.results_queue_service.create_queue(
                self.config.results_container_name)
            self.results_queue_service.encode_function = models.QueueMessageFormat.noencode

            # creates instance of Redis client to use for job status storage
            pool = redis.ConnectionPool(host=self.redis_host,
                                        port=self.redis_port)
            self.storage_service_cache = redis.Redis(connection_pool=pool)

            return True
        except Exception as ex:
            self.log_exception(ex, self.init_storage_services.__name__)
            return False

    def log_exception(self, exception, functionName):
        """
        Logs an exception to the logger instance for this class.

        :param Exception exception: The exception thrown.
        :param str functionName: Name of the function where the exception occurred.
        """
        self.logger.debug("Exception occurred in: " + functionName)
        self.logger.debug(type(exception))
        self.logger.debug(exception)

    def write_result(self, result):
        """
        Encrypts and writes result to queue

        :param str result: The result to write to queue
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # encrypt the encoded result and then encode it
            encryptedResult = base64.b64encode(self.aescipher.encrypt(result))

            # put the encoded result into the azure queue for future consolidation
            self.results_queue_service.put_message(
                self.config.results_queue_name, encryptedResult)

            return True
        except Exception as ex:
            self.log_exception(ex, self.write_result.__name__)
            return False

    def count_consolidated_results(self):
        """
        Returns a count of results that were consolidated.

        :return: int count: Total count of results that were consolidated.
        """
        try:
            consolidatedResults = self.storage_service_cache.get(
                self.config.results_consolidated_count_redis_key)
            return consolidatedResults
        except Exception as ex:
            self.log_exception(ex, self.count_consolidated_results.__name__)
            return False

    def consolidate_results(self):
        """
        Consolidates queued results into a single result file in storage. Queue messages are deleted once they
        are added to the consolidated file.

        :return: int count: Total count of results consolidated in result file.
        """
        try:
            # ensure the consolidated append blob exists
            if not self.append_storage_service.exists(
                    self.config.results_container_name,
                    blob_name=self.config.results_consolidated_file):
                self.append_storage_service.create_blob(
                    self.config.results_container_name,
                    self.config.results_consolidated_file)

            result_messages = []
            with io.BytesIO() as consolidated_result:
                while len(result_messages
                          ) < self.config.result_consolidation_size:
                    messages = self.results_queue_service.get_messages(
                        self.config.results_queue_name,
                        min(self.config.result_consolidation_size, 32))

                    # If the queue is empty, stop and consolidate
                    if not messages:
                        break

                    # add the message to the memory stream
                    for msg in messages:
                        consolidated_result.write(msg.content + "\n")
                        result_messages.append(msg)

                # append the results to the consolidated file
                consolidated_result.seek(0)
                self.append_storage_service.append_blob_from_stream(
                    self.config.results_container_name,
                    self.config.results_consolidated_file, consolidated_result)

            # remove all of the messages from the queue
            num_of_consolidated_results = len(result_messages)
            for msg in result_messages:
                self.results_queue_service.delete_message(
                    self.config.results_queue_name, msg.id, msg.pop_receipt)
            self.storage_service_cache.incrby(
                self.config.results_consolidated_count_redis_key,
                num_of_consolidated_results)

            # write the count of results we consolidated out to queue to provide status
            self.job_status_queue_service.put_message(
                self.config.job_status_queue_name,
                str(num_of_consolidated_results) + " results consolidated.")

            return len(result_messages)

        except Exception as ex:
            self.log_exception(ex, self.consolidate_results.__name__)
            return len(result_messages)

    def get_total_jobs_consolidated_status(self):
        """
        Write out the current state of the workload: the percentage of jobs that are completed and consolidated.
        :return: float status: percentage of completed jobs
        """
        # log out total job status
        total_scheduled_jobs = self.storage_service_cache.get(
            self.config.scheduled_jobs_count_redis_key)
        total_consolidated_results = self.storage_service_cache.get(
            self.config.results_consolidated_count_redis_key)

        if total_consolidated_results is None:
            total_consolidated_results = "0"

        status_message = "Total: " + total_consolidated_results + "/" + total_scheduled_jobs + " jobs have been successfully processed and consolidated."
        self.logger.info(status_message)
        self.job_status_queue_service.put_message(
            self.config.job_status_queue_name, status_message)

        return float(total_consolidated_results) / int(total_scheduled_jobs)
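
# --- Usage sketch (added; not part of the original example) ---
# Hypothetical flow: results are encrypted and queued one by one, then a worker
# periodically folds them into the single consolidated append blob.
#
# results = Results(logger, redisHost='localhost', redisPort=6379)
# results.write_result('some,result,row')        # encrypt + enqueue one result
# results.consolidate_results()                  # drain the queue into the append blob
# results.get_total_jobs_consolidated_status()   # log the percentage completed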
Example #31
    def setUp(self):
        self.loop = asyncio.get_event_loop()

        self.bs = AppendBlobService(account_name=ACCOUNT_NAME,
                                    account_key=ACCOUNT_KEY)
Example #32
# generator = block_blob_service.list_blobs('mycontainer')
# for blob in generator:
#     print(blob.name)


# block_blob_service.get_blob_to_path('mycontainer', 'myblockblob', 'out-sunset.png')


# block_blob_service.delete_blob('mycontainer', 'myblockblob')





from azure.storage.blob import AppendBlobService

append_blob_service = AppendBlobService(account_name='bobur', account_key='6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw==')

# The same containers can hold all types of blobs
append_blob_service.create_container('myappendcontainer')

# Append blobs must be created before they are appended to
append_blob_service.create_blob('myappendcontainer', 'myappendblob')
append_blob_service.append_blob_from_text('myappendcontainer', 'myappendblob', u'Hello, world!')

append_blob = append_blob_service.get_blob_to_text('myappendcontainer', 'myappendblob')

print(append_blob.content)
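
# Note (added): append_blob_from_text appends a block to the end of the blob,
# so repeated calls accumulate text; create_blob above (re)creates the blob empty.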

Example #33
import csv
import sys
from math import sin, cos, sqrt, atan2, radians
import datetime
import time
import storageconfig as cfg

from azure.storage.blob import AppendBlobService

# Configure account name with the Azure Storage Account Name and the account Key from Storage Explorer
append_blob_service = AppendBlobService(
    account_name=cfg.storage['storage_account_name'],
    account_key=cfg.storage['storage_account_key'])


# Reads the start and stop index passed in through SLURM
start = int(sys.argv[1])
stop = int(sys.argv[2])

# Creates the blob for this batch.
append_blob_service.create_blob(
    'distances', str(start) + "-" + str(stop) + '.csv')


# Logs the start time
append_blob_service.append_blob_from_text(
    cfg.storage['container_name'],
    cfg.storage['blob_name'], "Starting " + str(start) + "-" + str(
        stop) + ":" + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

      FROM Kwinana_Plc_notification t1 join Kwinana_Plc_refference_data
      t2 on t1.Commodity = t2.Commodity order by AutoId DESC"""


def wa_timenow():
    #get current perth time
    dtn = datetime.now() + timedelta(hours=8)
    return dtn


cursor.execute(sql)
row = cursor.fetchone()
outval = ''
if row[0] is None:
    outval = ",".join([str(i) for i in row[1:]])

    #connect to request-log blob and log request
    append_blob_service = AppendBlobService(account_name=myaccount,
                                            account_key=mykey)
    append_blob_service.append_blob_from_text(
        'requestlogs', 'request.txt', "%s,%s " % (wa_timenow(), outval))
    append_blob_service.append_blob_from_text('requestlogs', 'request.txt',
                                              "\n")
    append_blob = append_blob_service.get_blob_to_text('requestlogs',
                                                       'request.txt')

#send response
response = open(os.environ['res'], 'w')

response.write(outval)
response.close()
class FileLoger():
    def __init__(self):
        '''Initialize and create a CSV file
        '''
        start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        self.rover_properties = utility.load_configuration(os.path.join('setting', 'rover.json'))
        if not self.rover_properties:
            os._exit(1)
        if not os.path.exists('data/'):
            os.mkdir('data/')
        self.output_packets = self.rover_properties['userMessages']['outputPackets']
        self.log_file_rows = {}
        self.log_file_names = {}
        self.log_files_obj = {}
        self.log_files = {}
        self.user_file_name = '' # the prefix of log file name.
        self.msgs_need_to_log = []
        self.ws = False
        # azure app.
        self.user_id = ''
        self.file_name = ''
        self.sas_token = '' 
        self.db_user_access_token = ''
        self.host_url = self.rover_properties['userConfiguration']['hostURL']

        #
        self.threads = []  # threads of receiver and parser
        self.exit_thread = False  # flag of exit threads
        self.exit_lock = threading.Lock()  # lock of exit_thread
        self.data_dict = {}  # data container
        self.data_lock = threading.Lock()  # lock of data_queue

    def start_user_log(self, file_name='', ws=False):
        '''
        start log.
        return:
                0: OK
                1: exception that has started logging already.
                2: other exception.
        '''
        try:
            if len(self.log_file_rows) > 0:
                return 1 # has started logging already.

            self.ws = ws
            self.exit_thread = False
            self.user_file_name = file_name
            start_time = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
            for packet in self.output_packets:
                if 1 == packet['save2file']:
                    self.msgs_need_to_log.append(packet['name'])
                    self.log_file_rows[packet['name']] = 0
                    if self.user_file_name == '':
                        self.log_file_names[packet['name']] = packet['name'] +'-' + start_time + '.csv'
                    else:
                        self.log_file_names[packet['name']] = self.user_file_name + '_' + packet['name'] +'-' + start_time + '.csv'
                    self.log_files[packet['name']] = self.log_file_names[packet['name']]
                    self.log_files_obj[packet['name']] = open('data/' + self.log_file_names[packet['name']], 'w')    

            if self.ws:
                self.get_sas_token()
                self.data_dict.clear()
                for i, (k, v) in enumerate(self.log_files.items()): # k:pack type  v:log file name
                    self.data_dict[v]=''
                    threading.Thread(target=self.upload_azure, args=(k,v)).start()
            return 0
        except Exception as e:
            print('Exception! File:[{0}], Line:[{1}]. Exception:{2}'.format(__file__, sys._getframe().f_lineno, e))
            return 2

    def stop_user_log(self):
        '''
        stop log.
        return:
                0: OK
                1: exception that driver hasn't started logging files yet.
                2: other exception.
        '''
        rev = 0
        try:
            if len(self.log_file_rows) == 0:
                return 1 # driver hasn't started logging files yet.
            for i, (k, v) in enumerate(self.log_files_obj.items()):
                v.close()
            self.log_file_rows.clear()
            self.log_file_names.clear()
            self.log_files_obj.clear()
            rev = 0
        except Exception as e:
            print(e)
            rev = 2

        if self.ws:
            time.sleep(1)
            self.exit_lock.acquire()
            self.exit_thread = True
            self.exit_lock.release()
            self.ws = False
            
        return rev

    # def upload_callback(self, current, total):
    #     print('({}, {})'.format(current, total))

    def upload_azure(self, packet_type, log_file_name):
        if self.db_user_access_token == '' or self.sas_token == '':
            print("Error: Can not upload log to azure since token is empty! Please check the network.")
            
        print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'), log_file_name, ' start.')

        accountName = 'navview'
        countainerName = 'data-1000'
        fileName = log_file_name
        bcreate_blob_ok = False

        error_connection = 'ConnectionError'
        error_authorization = 'AuthenticationFailed'
        ii=0
        while True:
            # get data from data_dict.
            self.data_lock.acquire()
            text = self.data_dict[log_file_name]
            self.data_dict[log_file_name] = ''
            self.data_lock.release()

            # check if user stop logging data.
            self.exit_lock.acquire()
            if self.exit_thread:
                # check for internet and text
                if text == '' or (not self.internet_on()):
                    self.exit_lock.release()
                    break
                else:
                    pass
            self.exit_lock.release()

            #let CPU have a break.
            if text == '' : 
                time.sleep(1)
                continue

            #create blob on azure
            if not bcreate_blob_ok:
                try:
                    self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                sas_token=self.sas_token,
                                                                protocol='http')
                    self.append_blob_service.create_blob(container_name=countainerName, blob_name=fileName,
                                                        content_settings=ContentSettings(content_type='text/plain'))
                    bcreate_blob_ok = True
                    threading.Thread(target=self.save_to_db_task, args=(packet_type, log_file_name)).start()
                except Exception as e:
                    # print('Exception when create_blob:', type(e), e)
                    if error_connection in str(e):
                        pass
                    elif error_authorization in str(e):
                        self.get_sas_token()
                        self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                    sas_token=self.sas_token,
                                                                    protocol='http')
                    print('Retry to create_blob again...')
                    continue

            # append blob on azure
            try:
                # self.append_blob_service.append_blob_from_text(countainerName, fileName, text, progress_callback=self.upload_callback)
                self.append_blob_service.append_blob_from_text(countainerName, fileName, text)
            except Exception as e:
                # print('Exception when append_blob:', type(e), e)
                if error_connection in str(e):
                    pass
                elif error_authorization in str(e):
                    self.get_sas_token()
                    self.append_blob_service = AppendBlobService(account_name=accountName,
                                                                sas_token=self.sas_token,
                                                                protocol='http')
                    # if append blob failed, do not drop 'text', but push 'text' to data_dict and re-append next time.
                    self.data_lock.acquire()
                    self.data_dict[log_file_name] = text + self.data_dict[log_file_name]
                    self.data_lock.release()

        if bcreate_blob_ok:
            # if not self.save_to_ans_platform(packet_type, log_file_name):
            #     print('save_to_ans_platform failed.')
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:') , log_file_name, ' done.')

    def save_to_db_task(self, packet_type, file_name):
        if not self.save_to_ans_platform(packet_type, file_name):
            print('save_to_ans_platform failed.')

    def update(self, packet, packet_type, is_var_len_frame):
        if len(self.log_file_rows) == 0: #if hasn't started logging.
            return

        if packet_type in self.msgs_need_to_log:
            if is_var_len_frame:
                self.log_var_len(packet, packet_type)
            else:
                self.log(packet, packet_type)

    def get_log_file_names(self):
        return self.log_file_names.copy()

    def log(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next((x for x in self.output_packets if x['name'] == packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:
            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            # for key in data:
            for i, (k, v) in enumerate(data.items()):
                '''dataStr = output_packet['payload'][i]['name'] + \
                          ' [' + \
                          output_packet['payload'][i]['unit'] + \
                          ']'''
                dataStr = output_packet['payload'][i]['name']
                unitStr = output_packet['payload'][i]['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s} ({1:s}),'.format(dataStr, unitStr)

            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        for i, (k, v) in enumerate(data.items()):
            outputPcktType = output_packet['payload'][i]['type']

            if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
               outputPcktType == 'uint16' or outputPcktType == 'int16' or \
               outputPcktType == 'uint64' or outputPcktType == 'int64':
                # integers and unsigned integers
                str += '{0:d},'.format(v)
            elif outputPcktType == 'double':
                # double
                str += '{0:0.8f},'.format(v)# 15.12
            elif outputPcktType == 'float':
                str += '{0:0.4f},'.format(v) # 12.8
            elif outputPcktType == 'uint8':
                # byte
                str += '{0:d},'.format(v)
            elif outputPcktType == 'uchar' or outputPcktType == 'char' or outputPcktType == 'string':
                # character
                str += '{:},'.format(v)
            else:
                # unknown
                str += '{0:3.5f},'.format(v)
        # 
        str = header + str[:-1] + '\n'

        self.log_files_obj[packet_type].write(str)
        self.log_files_obj[packet_type].flush()

        if self.ws:
            self.data_lock.acquire()
            self.data_dict[self.log_files[packet_type]] = self.data_dict[self.log_files[packet_type]] + str
            self.data_lock.release()

    def log_var_len(self, data, packet_type):
        ''' Parse the data, read in from the unit, and generate a data file using
            the json properties file to create a header and specify the precision
            of the data in the resulting data file.
        '''
        output_packet = next((x for x in self.output_packets if x['name'] == packet_type), None)

        '''Write row of CSV file based on data received.  Uses dictionary keys for column titles
        '''
        if self.log_file_rows[packet_type] == 0:

            # Loop through each item in the data dictionary and create a header from the json
            #   properties that correspond to the items in the dictionary
            labels = ''
            for value in output_packet['payload']:
                dataStr = value['name']
                unitStr = value['unit']
                if unitStr == '':
                    labels = labels + '{0:s},'.format(dataStr)
                else:
                    labels = labels + '{0:s} ({1:s}),'.format(dataStr, unitStr)
            # Remove the comma at the end of the string and append a new-line character
            labels = labels[:-1]
            header = labels + '\n'
        else:
            header = ''

        self.log_file_rows[packet_type] += 1

        # Loop through the items in the data dictionary and append to an output string
        #   (with precision based on the data type defined in the json properties file)
        str = ''
        const_str = ''
        var_str = ''
        var_fileld_tpyes = []
        var_fileld_num = len(output_packet['payload']) - output_packet['var_num']['field_idx']
        const_fileld_num = len(output_packet['payload']) - var_fileld_num
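        # e.g. (hypothetical counts): with 10 payload fields and field_idx == 6,
        # var_fileld_num == 4 repeating fields per entry and const_fileld_num == 6
        # fixed fields taken from the first item of data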

        for idx, value in enumerate(output_packet['payload']):
            if idx >= const_fileld_num:
                var_fileld_tpyes.append(value['type'])

        for idx, key in enumerate(data):
            if idx == 0: # handle const fields, which are all in the first item of data.
                for i, (k, v) in enumerate(key.items()):
                    outputPcktType = output_packet['payload'][i]['type']

                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        const_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        const_str += '{0:0.12f},'.format(v)  # 12 decimal places
                    elif outputPcktType == 'float':
                        const_str += '{0:0.4f},'.format(v)  # 4 decimal places
                    elif outputPcktType == 'uint8':
                        # byte
                        const_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char' or outputPcktType == 'string':
                        # character
                        const_str += '{:},'.format(v)
                    else:
                        # unknown
                        const_str += '{0:3.5f},'.format(v)
            else:
                for i, (k, v) in enumerate(key.items()):
                    outputPcktType = var_field_types[i]
                    if outputPcktType == 'uint32' or outputPcktType == 'int32' or \
                    outputPcktType == 'uint16' or outputPcktType == 'int16' or \
                    outputPcktType == 'uint64' or outputPcktType == 'int64':
                        # integers and unsigned integers
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'double':
                        # double
                        var_str += '{0:15.12f},'.format(v)  # 15.12
                    elif outputPcktType == 'float':
                        var_str += '{0:12.4f},'.format(v)  # 12.4
                    elif outputPcktType == 'uint8':
                        # byte
                        var_str += '{0:d},'.format(v)
                    elif outputPcktType == 'uchar' or outputPcktType == 'char':
                        # character
                        var_str += '{:},'.format(v)
                    else:
                        # unknown
                        var_str += '{0:3.5f},'.format(v)

                str = const_str + var_str
                str = header + str[:-1] + '\n'

                self.log_files_obj[packet_type].write(str)
                self.log_files_obj[packet_type].flush()

                if self.ws:
                    self.data_lock.acquire()
                    self.data_dict[self.log_files[packet_type]] = self.data_dict[self.log_files[packet_type]] + str
                    self.data_lock.release()

                header = ''
                str = ''
                var_str = ''

    def set_user_id(self, user_id):
        self.user_id = user_id
        if not isinstance(self.user_id, str):
            self.user_id = str(self.user_id)

    def set_user_access_token(self, access_token):
        self.db_user_access_token = access_token

    def upload(self, log_file_names):
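        # Run the Azure upload on a background thread so data logging is not blocked.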
        t = threading.Thread(target=self.upload_to_azure_task, args=(log_file_names, ))
        t.start()

    def upload_to_azure_task(self, log_files_dict):
        self.get_sas_token()
        if self.db_user_access_token != '' and self.sas_token != '':
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'), 'Start.')
            for k, v in log_files_dict.items():  # k: packet type; v: log file name
                print('upload:', v)
                self.upload_to_azure(k, v)
            # self.db_user_access_token = ''
            # self.sas_token = ''
            print(datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S:'), 'Done.')

    def get_sas_token(self):
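        # POST to <host_url>/token/storagesas with the user's access token and cache the returned
        # SAS token in self.sas_token; it is used by the blob uploads below.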
        try:
            url = self.host_url + "token/storagesas"
            headers = {'Content-type': 'application/json', 'Authorization': self.db_user_access_token}
            response = requests.post(url, headers=headers)
            rev = response.json()
            if 'token' in rev:
                self.sas_token = rev['token']
            else:
                self.sas_token = ''
                print('Error: failed to get SAS token!')
        except Exception as e:
            print('Exception in get_sas_token:', e)

    def upload_to_azure(self, packet_type, file_name):
        ''' Upload CSV's to Azure container.
        '''
        f = open("data/" + file_name, "r")
        text = f.read() #.decode("utf-8")

        try:
            self.azure_storage('navview', self.sas_token, 'data', file_name, text)
        except Exception as e:
            print('azure_storage exception:', e)
            return
            # Try again!
            # self.azure_storage('navview', self.sas_token, 'data', file_name, text)
            pass

        ''' Trigger Database upload
        '''
        rev = self.save_to_ans_platform(packet_type, file_name)
        if not rev:
            print('save_to_ans_platform failed.')

    def azure_storage(self, accountName, sasToken, containerName, fileName, text):
        use_append_blob = False  # the append-blob path is kept for reference but disabled
        if use_append_blob:
            self.append_blob_service = AppendBlobService(account_name=accountName,
                                                         sas_token=sasToken,
                                                         protocol='http')
            self.append_blob_service.create_blob(container_name=containerName, blob_name=fileName,
                                                 content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text(containerName, fileName, text)
        else:
            self.block_blob_service = BlockBlobService(account_name=accountName,
                                                       sas_token=sasToken,
                                                       protocol='http')
            self.block_blob_service.create_blob_from_text(container_name=containerName,
                                                          blob_name=fileName,
                                                          text=text,
                                                          content_settings=ContentSettings(content_type='text/plain'))

    def save_to_ans_platform(self, packet_type, file_name):
        ''' Upload CSV related information to the database.
        '''
        try:
            data = {"type": 'INS', "model": 'INS1000', "fileName": file_name, "url": file_name, "userId": self.user_id, 
                    "logInfo": { "pn": '11', "sn": '', "packetType":packet_type,"insProperties":json.dumps(self.rover_properties)}}

            url = self.host_url + "api/recordLogs/post"
            data_json = json.dumps(data)
            headers = {'Content-type': 'application/json', 'Authorization': self.db_user_access_token}
            response = requests.post(url, data=data_json, headers=headers)
            return True if 'success' in response.json() else False
        except Exception as e:
            print('Exception when update db:', e)

    def internet_on(self):
        try:
            url = 'https://navview.blob.core.windows.net/'
            if sys.version_info[0] > 2:
                import urllib.request
                response = urllib.request.urlopen(url, timeout=1)
            else:
                import urllib2
                response = urllib2.urlopen(url, timeout=1)
            # print(response.read())
            return True
        except Exception:
            # Catches URLError and timeouts from either Python version (urllib2 is undefined on Python 3).
            return False
Beispiel #36
0
import datetime
import json
import sys

import pandas as pd

input_data = pd.read_csv("auto.csv")
# input_data.Symbol[i] is the i-th symbol; input_data.Series.size is the number of rows.
all_stock_codes = input_data.Symbol
from nsetools import Nse
nse = Nse()
outputlist = []
for script_code in all_stock_codes:
    try:
        print(script_code)
        q = nse.get_quote(script_code)
        # Collect each quote in a list for later JSON serialization.
        outputlist.append(q)
        print(q)
    except Exception:
        print("Unexpected error:",
              sys.exc_info()[0])

jsonObject = json.dumps(outputlist)
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(
    account_name='aphrostock',
    account_key=
    'cwAlI7P6WfUqijt0jMP+0CCqM54hmoHRofxqdj9PqPamtIfdm9vRUpC+jrpRv/Idma61sSg7NDIvFwkyhMm7KQ=='
)
# Name the blob with the current timestamp inside the existing 'stockdata' container.
blobName = str(datetime.datetime.now())
append_blob_service.create_blob('stockdata', blobName)
append_blob_service.append_blob_from_text('stockdata', blobName,
                                          jsonObject)
Beispiel #37
0
# Usage: Call python3 controller.py X, where X is the number of SLURM
# jobs you want to spawn on the SLURM nodes.
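# Example invocation (hypothetical job count): python3 controller.py 16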

import csv
import sys
import subprocess
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure with the Azure Storage account name and account key (both available from Storage Explorer).
append_blob_service = AppendBlobService(
	account_name='storage_account_name', 
	account_key='storage_account_key')

# Create the container and an append blob that serves as a shared log for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text('distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data in to a list for easy access.
with open('airports-world.csv') as csvfile:
	reader = csv.DictReader(csvfile)
	for row in reader:
Beispiel #38
0
def main():
    logging.basicConfig(level=logging.DEBUG)
    with open(TASKDATA) as taskdata_file:
        taskdata = json.loads(taskdata_file.read())
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    config = github.get_config()
    blob_service = AppendBlobService(
        account_name=taskdata["storage_account_name"],
        account_key=taskdata["storage_account_key"])
    queue_service = QueueService(
        connection_string=taskdata["queue_connection_string"])
    loop = asyncio.get_event_loop()
    ctx = Context(loop=loop,
                  config=config,
                  blob_service=blob_service,
                  queue_service=queue_service,
                  taskdata=taskdata)

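    # Create a public "logs" container and an append blob keyed by ctx.pid; the blob URL is reported to
    # GitHub as the commit-status target so the build log can be followed while the build runs.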
    blob_service.create_container("logs",
                                  fail_on_exist=False,
                                  public_access=PublicAccess.Blob)
    blob_service.create_blob("logs",
                             ctx.pid,
                             content_settings=ContentSettings(
                                 content_type="text/plain; charset=utf-8"))
    gh_commit.create_status(
        "pending",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Build started",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    os.makedirs(REPOSDIR, exist_ok=True)
    # Check whether we are the only process updating the git cache on the SMB share;
    # otherwise skip updating.
    if not os.path.exists(LOCKFILENAME):
        lock = open(LOCKFILENAME, "w")
        lock.close()
        update_git_cache(ctx)
        os.unlink(LOCKFILENAME)

    if os.path.exists(SRCDIR):
        shutil.rmtree(SRCDIR)
    os.makedirs(os.path.join(SRCDIR, "build/conf"))
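    # Seed build/conf/auto.conf with the per-configuration "localconf" snippet plus the common AUTOCONFIG settings.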
    with open(os.path.join(SRCDIR, "build/conf/auto.conf"), "a") as localconf:
        localconf.write("\n%s\n" % config.get("localconf", ""))
        localconf.write(AUTOCONFIG)

    repos = get_repos(config)
    repos.append((repodirname(taskdata["gh"]["repository"]["clone_url"]),
                  taskdata["gh"]["repository"]["clone_url"], None, None))
    for reponame, repourl, reporef, _ in repos:
        refrepopath = os.path.join(REPOSDIR, reponame)
        run(ctx,
            ["git", "clone", "--reference", refrepopath, repourl, reponame],
            cwd=SRCDIR)
        if reporef:
            LOG.info("Checkout %s to %s" % (reponame, reporef))
            run(ctx, ["git", "checkout", "%s" % reporef],
                cwd=os.path.join(SRCDIR, reponame))

    # Do checkout
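    # For pull requests, the contributor's fork is added as a remote and fetched so the PR's SHA is available locally.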
    if taskdata["gh"]["type"] == "pull_request":
        LOG.info("Add remote repo %s" % taskdata["gh"]["clone_url"])
        run(ctx, [
            "git", "remote", "add", "contributor", taskdata["gh"]["clone_url"]
        ],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
        LOG.info("Fetch contributor's repo")
        run(ctx, ["git", "fetch", "contributor"],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
    LOG.info("Checkout %s to %s" % (repodirname(
        taskdata["gh"]["repository"]["clone_url"]), taskdata["gh"]["sha"]))
    run(ctx, ["git", "checkout", taskdata["gh"]["sha"]],
        cwd=os.path.join(
            SRCDIR, repodirname(taskdata["gh"]["repository"]["clone_url"])))

    # Fetch sstate if any
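    # If a cached sstate archive exists, unpack it into the source tree so the build can reuse previous artifacts.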
    if os.path.exists(get_sstate_archive_path(ctx)):
        with tarfile.open(name=get_sstate_archive_path(ctx),
                          mode="r:gz") as sstate_tar:
            sstate_tar.extractall(path=SRCDIR)

    addlayers = []
    for dep in config["dependencies"]:
        repodir = repodirname(dep["url"])
        layers = dep.get("layers", None)
        if layers:
            addlayers.extend([
                "bitbake-layers add-layer ../%s/%s" % (repodir, layer)
                for layer in layers
            ])
        else:
            addlayers.append("bitbake-layers add-layer ../%s" % repodir)
    addlayers.append("bitbake-layers add-layer ../%s" %
                     repodirname(taskdata["gh"]["repository"]["clone_url"]))

    run_script(ctx,
               BUILDSCRIPT % ("\n".join(addlayers), config["bitbake_target"]),
               cwd=SRCDIR)
    save_sstate(ctx)

    # Github auth token has expired by now most probably => renew
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    gh_commit.create_status(
        "success",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Target has been built successfully",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    loop.close()
    # TODO: copy cloud-init log files to share
    taskdata["build_result"] = "success"
    queue_service.put_message(
        "buildresults",
        base64.b64encode(json.dumps(taskdata).encode("utf")).decode("utf"))