Пример #1
0
 def upload_file(self, file_path):
    """Ensure the 'script' container exists with public blob access.

    NOTE(review): despite the name, the visible code only creates the
    container; the actual upload presumably happens outside this view,
    and `file_path` is unused here -- confirm against the full method.
    """
    blob_service = BlobService(self.storage_account, self.account_key)
    try:
        blob_service.create_container("script", x_ms_blob_public_access='blob')
    except Exception as e:
        # Fix: 'except Exception, e' is Python-2-only syntax and e.message
        # is deprecated/removed; the conflict (container already exists)
        # is expected, so it is logged and ignored.
        self.logger.warn(str(e))
Пример #2
0
class AzureStorage(Storage):
    """Django storage backend for Azure Blob Storage using the legacy
    `azure.storage.BlobService` SDK.

    All blobs live in the single container named by AZURE_CONTAINER;
    the container is created lazily on first use of `connection`.
    """

    # Credentials and container name are read from Django settings once,
    # at class-definition time.
    account_name = settings.AZURE_ACCOUNT_NAME
    account_key = settings.AZURE_ACCOUNT_KEY
    azure_container = settings.AZURE_CONTAINER

    def __init__(self, *args, **kwargs):
        super(AzureStorage, self).__init__(*args, **kwargs)
        # The BlobService is created lazily in `connection` so importing
        # this module does not touch the network.
        self._connection = None

    @property
    def connection(self):
        """Return a cached BlobService, creating the container on first use."""
        if self._connection is None:
            # Create connection
            self._connection = BlobService(self.account_name, self.account_key)
            
            # Create container if needed
            # NOTE(review): fail_on_exist=True raises if another process
            # creates the container between the list and the create call.
            containers = [c for c in self._connection.list_containers(prefix=self.azure_container) if c.name == self.azure_container ]
            if len(containers) == 0:
                self._connection.create_container(self.azure_container, {'origin': 'created by Django web app'}, fail_on_exist=True)

        return self._connection

    def _open(self, name, mode="rb"):
        """Download blob `name` into an in-memory upload file and rewind it.

        `mode` is accepted for the Storage API but not otherwise used here.
        """
        stream = SimpleUploadedFile(name, None)
        self.connection.get_blob_to_file(self.azure_container, name, stream)
        stream.seek(0)
        return stream

    def exists(self, name):
        """True if a blob named `name` exists in the container."""
        try:
            self.connection.get_blob_properties(self.azure_container, name)
        except WindowsAzureMissingResourceError:
            return False
        else:
            return True

    def delete(self, name):
        """Delete blob `name`; the SDK raises if it does not exist."""
        self.connection.delete_blob(self.azure_container, name)

    def size(self, name):
        """Size of blob `name`.

        NOTE(review): the legacy SDK returns header values as strings, so
        this likely returns a str rather than an int -- verify callers.
        """
        properties = self.connection.get_blob_properties(self.azure_container, name)
        return properties["content-length"]

    def _save(self, name, content):
        """Upload `content` as a block blob under `name`; return the name."""
        self.connection.put_block_blob_from_file(self.azure_container, name, content)
        return name

    def url(self, name):
        """Return a blob URL signed with a shared-access signature that is
        valid from 10 minutes in the past to 10 minutes in the future,
        read-only ('r')."""
        ap = AccessPolicy(expiry=(timezone.datetime.utcnow() + timezone.timedelta(seconds=600)).strftime('%Y-%m-%dT%H:%M:%SZ'), \
                          start=(timezone.datetime.utcnow() + timezone.timedelta(seconds=-600)).strftime('%Y-%m-%dT%H:%M:%SZ'), \
                          permission='r')
        sap = SharedAccessPolicy(ap)
        sas = SharedAccessSignature(self.account_name, self.account_key)
        url = sas.generate_signed_query_string(path=self.azure_container + '/' + name, resource_type='b', shared_access_policy=sap)
        
        # NOTE(review): _convert_query_string is a private SDK helper and
        # may break across SDK versions.
        return self.connection.make_blob_url(self.azure_container, name) + "?" + sas._convert_query_string(url)
Пример #3
0
 def download_block_blob(self, account, key, container, blobname, path):
     """Download `blobname` from `container` into directory `path`.

     Creates `path` if missing. Returns True on success, False when the
     local write fails.
     """
     if not os.path.exists(path):
         os.makedirs(path)
     blob_service = BlobService(account_name=account, account_key=key)
     blob = blob_service.get_blob(container, blobname)
     try:
         # Fix: blob content is raw bytes, so open in binary mode ('wb');
         # text mode would corrupt it on Windows. os.path.join replaces the
         # hand-built separator, and the bare except is narrowed to the
         # filesystem errors this block can actually raise.
         with open(os.path.join(path, blobname), 'wb') as f:
             f.write(blob)
         return True
     except (IOError, OSError):
         return False
Пример #4
0
class _BlobStorageFileHandler(object):
    """Mixin that ships rotated log files to an Azure Blob Storage container.

    `container` may contain %-style placeholders filled from metadata
    (hostname with '_' replaced by '-', pid, userid) and is lowercased to
    satisfy Azure container-naming rules.
    """

    def __init__(self,
                  account_name=None,
                  account_key=None,
                  protocol='https',
                  container='logs',
                  zip_compression=False,
                  max_connections=1,
                  max_retries=5,
                  retry_wait=1.0):
        self.service = BlobService(account_name, account_key, protocol)
        # The container is created lazily on the first upload.
        self.container_created = False
        hostname = gethostname()
        # Azure container names cannot contain underscores; hyphens are OK.
        self.meta = {'hostname': hostname.replace('_', '-'),
                     'process': os.getpid(),
                     'userid':'1'}
        self.container = (container % self.meta).lower()
        # Restore the real hostname for any later non-container use of meta.
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container,
        optionally zip-compressing it into a temp file first.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                # os.fdopen takes ownership of fd; both context managers
                # close the archive before the upload below.
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.put_block_blob_from_path(self.container,
                                                  fileName + suffix,
                                                  file_path,
                                                  x_ms_blob_content_type=content_type,
                                                  max_connections=self.max_connections,
                                                  max_retries=self.max_retries,
                                                  retry_wait=self.retry_wait)
        finally:
            # Fix: test the temp path, not the fd -- a valid file
            # descriptor can be 0 (falsy), which would have leaked the
            # temp file in the original `if self.zip_compression and fd`.
            if self.zip_compression and tmpfile_path:
                os.remove(tmpfile_path)
Пример #5
0
 def list_storage_container(self, storage_account, storage_key):
     """Return the names of up to 10 containers in `storage_account`.

     Exits the process on a WindowsAzureConflictError.
     """
     try:
         container_names = []
         self.logger.info("Listing storage containers")
         blob_service = BlobService(account_name=storage_account,
                                    account_key=storage_key)
         result = blob_service.list_containers(None, None, 10, None)
         # Iterate the containers directly instead of indexing by range().
         for container in result.containers:
             container_names.append(container.name)
         return container_names
     except WindowsAzureConflictError:
         # Fix: the original handler referenced `container_name`, a name
         # that does not exist in this function (it would raise NameError),
         # and its message was copy-pasted from the delete method.
         self.logger.info("Error: can not list storage containers")
         sys.exit(1)
Пример #6
0
 def delete_containers(self, container_name, storage_account, storage_key):
     """Delete `container_name` from `storage_account`.

     Returns the SDK result (False if the container did not exist);
     exits the process on a WindowsAzureConflictError.
     """
     try:
         self.logger.info("Container Name: " + container_name)
         self.logger.info("Storage account: " + storage_account)
         # Security fix: the storage key is a credential -- never write it
         # to the log (the original logged it verbatim).
         blob_service = BlobService(account_name=storage_account,
                                    account_key=storage_key)
         self.logger.info("creating Blob Service connection")
         # Second argument False => do not raise when the container is
         # missing; the SDK returns False instead.
         result = blob_service.delete_container(container_name, False)
         return result
     except WindowsAzureConflictError:
         self.logger.info("Error: Can not delete storage container: " +
                          container_name)
         sys.exit(1)
Пример #7
0
    def handle_noargs(self, **options):
        """Management-command entry point: apply (or clear, with --disable)
        CORS rules on the storage account's blob service.

        Writes the current and the new service properties as XML to stdout
        before pushing the update.
        """
        self.set_options(**options)

        # Validation is skipped when disabling: clearing CORS needs no
        # credentials check beyond what the service call itself enforces.
        if not self.disable:
            if not ls.AZURE_ACCOUNT_NAME:
                raise CommandError('AZURE_ACCOUNT_NAME setting is missing')
            if not ls.AZURE_ACCOUNT_KEY:
                raise CommandError('AZURE_ACCOUNT_KEY setting is missing')
            if not self.origins:
                raise CommandError('Specify at least one origin')
            if not self.methods:
                raise CommandError('Specify at least one method')

        # Minimal local WindowsAzureData subclasses: the SDK serializes
        # them to XML from the attribute names, so the names below are
        # chosen to match the CORS schema elements -- do not rename.
        class CorsRule(WindowsAzureData):
            def __init__(self, origins, methods, maxage):
                self.allowed_origins = ','.join(origins)
                self.allowed_methods = ','.join(methods)
                self.allowed_headers = ''
                self.exposed_headers = ''
                self.max_age_in_seconds = maxage

        class Cors(WindowsAzureData):
            def __init__(self, rules):
                self.cors = rules

        blob_service = BlobService(ls.AZURE_ACCOUNT_NAME, ls.AZURE_ACCOUNT_KEY,
                                   ls.AZURE_DEFAULT_PROTOCOL)

        cors_rule = CorsRule(self.origins, self.methods, self.maxage)
        service_properties = blob_service.get_blob_service_properties()
        self.stdout.write('--FOUND PROPERTIES--')
        self.stdout.write(_convert_class_to_xml(service_properties))

        # --disable sends an empty rule list, which clears CORS entirely.
        cors_properties = StorageServiceProperties()
        if not self.disable:
            cors_properties.cors = Cors([cors_rule])
        else:
            cors_properties.cors = Cors([])

        # Leave metrics/logging unset so only the CORS section is sent.
        cors_properties.metrics = None
        cors_properties.logging = None
        self.stdout.write('')
        self.stdout.write('--NEW PROPERTIES--')
        self.stdout.write(_convert_class_to_xml(cors_properties))

        # As of the latest version, one can only send a part of the
        # properties and the rest will stay intact:
        # http://msdn.microsoft.com/en-us/library/azure/hh452235.aspx
        self.set_properties(blob_service, cors_properties)
Пример #8
0
    def handle_noargs(self, **options):
        """Management-command entry point: apply (or clear, with --disable)
        CORS rules on the storage account's blob service.

        Writes the current and the new service properties as XML to stdout
        before pushing the update.
        """
        self.set_options(**options)

        # Validation is skipped when disabling: clearing CORS needs no
        # origins/methods.
        if not self.disable:
            if not ls.AZURE_ACCOUNT_NAME:
                raise CommandError('AZURE_ACCOUNT_NAME setting is missing')
            if not ls.AZURE_ACCOUNT_KEY:
                raise CommandError('AZURE_ACCOUNT_KEY setting is missing')
            if not self.origins:
                raise CommandError('Specify at least one origin')
            if not self.methods:
                raise CommandError('Specify at least one method')

        # Minimal local WindowsAzureData subclasses: the SDK serializes
        # them to XML from the attribute names, so the names below are
        # chosen to match the CORS schema elements -- do not rename.
        class CorsRule(WindowsAzureData):
            def __init__(self, origins, methods, maxage):
                self.allowed_origins = ','.join(origins)
                self.allowed_methods = ','.join(methods)
                self.allowed_headers = ''
                self.exposed_headers = ''
                self.max_age_in_seconds = maxage

        class Cors(WindowsAzureData):
            def __init__(self, rules):
                self.cors = rules

        blob_service = BlobService(ls.AZURE_ACCOUNT_NAME, ls.AZURE_ACCOUNT_KEY,
                                   ls.AZURE_DEFAULT_PROTOCOL)

        cors_rule = CorsRule(self.origins, self.methods, self.maxage)
        service_properties = blob_service.get_blob_service_properties()
        self.stdout.write('--FOUND PROPERTIES--')
        self.stdout.write(_convert_class_to_xml(service_properties))

        # --disable sends an empty rule list, which clears CORS entirely.
        cors_properties = StorageServiceProperties()
        if not self.disable:
            cors_properties.cors = Cors([cors_rule])
        else:
            cors_properties.cors = Cors([])

        # Leave metrics/logging unset so only the CORS section is sent.
        cors_properties.metrics = None
        cors_properties.logging = None
        self.stdout.write('')
        self.stdout.write('--NEW PROPERTIES--')
        self.stdout.write(_convert_class_to_xml(cors_properties))

        # As of the latest version, one can only send a part of the
        # properties and the rest will stay intact:
        # http://msdn.microsoft.com/en-us/library/azure/hh452235.aspx
        self.set_properties(blob_service, cors_properties)
Пример #9
0
 def __init__(self,
              filename,
              when='h',
              interval=1,
              encoding=None,
              delay=False,
              utc=False,
              account_name=None,
              account_key=None,
              protocol='https',
              container='logs',
              ):
     """Timed-rotating handler that ships rotated logs to Azure blob storage.

     `filename` and `container` may contain %-style placeholders filled
     from {'hostname': ..., 'process': pid}. backupCount is pinned to 1
     because each rotated file is uploaded and removed locally.
     """
     hostname = gethostname()
     self.meta = {'hostname': hostname, 'process': os.getpid()}
     s = super(BlobStorageTimedRotatingFileHandler, self)
     s.__init__(filename % self.meta,
                when=when,
                interval=interval,
                backupCount=1,
                encoding=encoding,
                delay=delay,
                utc=utc)
     self.service = BlobService(account_name, account_key, protocol)
     # The container is created lazily by the upload path.
     self.container_created = False
     # Azure container names cannot contain '_'; substitute it only while
     # the container name is formatted, then restore the real hostname.
     self.meta['hostname'] = hostname.replace('_', '-')
     container = container % self.meta
     self.container = container.lower()
     self.meta['hostname'] = hostname
Пример #10
0
 def attachDiskToInstance(self, diskSize, vmname, disklabel,
                          storage_account, storage_key, container_name,
                          blob_name):
     """Register a data disk from a page blob and list all disks.

     NOTE(review): the blob URL, subscription id and certificate path are
     hard-coded below, and the diskSize/vmname/disklabel/container/blob
     parameters are currently unused -- this looks like demo scaffolding.
     TODO: wire the parameters through instead of the constants.
     """
     # Create Page Blob:
     blob_service = BlobService(account_name=storage_account,
                                account_key=storage_key)
     #blob_service.put_blob(container_name, blob_name, b'', 'PageBlob', x_ms_blob_content_length='1073741824')
     #url		= "http://"+storage_account+".blob.core.windows.net/"+container_name+"/"+blob_name
     #medialink		= "http://"+storage_account+".blob.core.windows.net/"+container_name+"/nfsnew.vhd"
     url = "http://elastichpc.blob.core.windows.net/ahmed-sate931/ahmed.vhd"
     lun = 5
     label = 'diskdata' + str(lun)
     diskname = label
     sms = self.CheckSubscription(
         '8be5609b-07c9-4114-8865-921ad82cb64a',
         '/media/ahmed/92b488cc-077b-480a-8a6c-62e6fd95339b/elastichpc/ehpc_azure/keys/mycert.pem'
     )
     # Fix: the original bound `os = "Linux"`, shadowing the `os` module
     # for the rest of the function (the value was never even used).
     os_type = "Linux"
     result = sms.add_disk(False, label, url, 'disk1ahmed', os_type)
     self.wait_for_async(sms, result.request_id)
     result = sms.list_disks()
     # Iterate the disks directly instead of indexing __dict__ by range().
     for i, disk in enumerate(result.disks):
         print("\n(" + str(i) + ")\n")
         print(disk.name)
         print(disk.media_link)
         print(disk.os)
         print("------------------------------------------")
Пример #11
0
 def create_containers(self, sms, storage_account, storage_key,
                       container_name, permission):
     """Create `container_name` in `storage_account` with the given public
     access `permission`.

     Returns the container name on success; exits the process on a
     WindowsAzureConflictError (name already taken).
     """
     try:
         for prefix, value in (("Storage account: ", storage_account),
                               ("Container Name: ", container_name),
                               ("permission: ", str(permission))):
             self.logger.info(prefix + value)
         service = BlobService(account_name=storage_account,
                               account_key=storage_key)
         self.logger.info("creating Blob Service connection")
         service.create_container(container_name, None, permission, False)
         self.logger.info("creating container: %s", container_name)
         return container_name
     except WindowsAzureConflictError:
         self.logger.info(
             "Error: can not create storage container with name %s ",
             container_name)
         sys.exit(1)
Пример #12
0
class _BlobStorageTestCase(_TestCase):
    """Shared fixture: purges this run's log blobs from every configured
    handler's container before each test."""

    def _get_container_name(self, handler_name):
        """Container configured for `handler_name`, normalized to Azure
        naming rules ('_' -> '-', lowercased); falsy values pass through."""
        name = _get_handler_config_value(handler_name, 'container')
        return name.replace('_', '-').lower() if name else name

    def setUp(self):
        self.service = BlobService(ACCOUNT_NAME, ACCOUNT_KEY)
        # Make sure no log blob from a previous run survives into the test.
        existing = {c.name for c in self.service.list_containers()}
        for handler_name in LOGGING['handlers']:
            target = self._get_container_name(handler_name)
            if target not in existing:
                continue
            prefix = os.path.basename(
                _get_handler_config_value(handler_name, 'filename'))
            for blob in self.service.list_blobs(target, prefix=prefix):
                self.service.delete_blob(target, blob.name)
Пример #13
0
 def setUp(self):
     """Connect to blob storage and purge any leftover log blobs so each
     test starts from a clean container."""
     self.service = BlobService(ACCOUNT_NAME, ACCOUNT_KEY)
     existing = {c.name for c in self.service.list_containers()}
     for handler_name in LOGGING['handlers']:
         target = self._get_container_name(handler_name)
         if target not in existing:
             continue
         log_filename = _get_handler_config_value(handler_name, 'filename')
         prefix = os.path.basename(log_filename)
         for blob in self.service.list_blobs(target, prefix=prefix):
             self.service.delete_blob(target, blob.name)
Пример #14
0
    def connection(self):
        """Lazily create and cache the BlobService, ensuring the backing
        container exists on first connection."""
        if self._connection is None:
            self._connection = BlobService(self.account_name, self.account_key)

            # Create the container only when it is genuinely absent.
            # any() replaces the list materialized just to test emptiness,
            # and the unidiomatic `len(x) == 0`.
            exists = any(c.name == self.azure_container
                         for c in self._connection.list_containers(prefix=self.azure_container))
            if not exists:
                # NOTE(review): fail_on_exist=True still raises if another
                # process creates the container between the list and this
                # call.
                self._connection.create_container(self.azure_container, {'origin': 'created by Django web app'}, fail_on_exist=True)

        return self._connection
Пример #15
0
 def upload_block_blob(self, container_name, blob_name, file_path,
                       storage_account, storage_key):
     """Upload `file_path` to `container_name`/`blob_name` in chunks of
     self.chunk_size, committing the block list at the end.

     Prints cumulative upload progress (in MB) to stdout.
     """
     result = self.list_storage_container(storage_account, storage_key)
     found = False
     blob_service = BlobService(account_name=storage_account,
                                account_key=storage_key)
     for name in result:
         if name == container_name:
             found = True
     if found:
         self.logger.info("container already exists")
     else:
         blob_service.create_container(container_name, None, None, False)
     # Start an empty block blob; blocks are appended and committed below.
     blob_service.put_blob(container_name, blob_name, '', 'BlockBlob')
     data_sent = 0
     block_ids = []
     index = 0
     with open(file_path, 'rb') as f:
         while True:
             data = f.read(self.chunk_size)
             if not data:
                 print("\n")
                 break
             block_id = base64.b64encode(str(index))
             blob_service.put_block(container_name, blob_name, data,
                                    block_id)
             block_ids.append(block_id)
             index += 1
             # Fix: count the bytes actually read -- the final chunk is
             # usually shorter than chunk_size, so the original
             # `data_sent += self.chunk_size` over-reported progress
             # (it even computed len(data) and then ignored it).
             data_sent += len(data)
             sys.stdout.write("\rUploaded data = %d MB"
                              % (data_sent / (1024 * 1024)))
             sys.stdout.flush()
     blob_service.put_block_list(container_name, blob_name, block_ids)
Пример #16
0
    def __init__(self, 
                 input_folder, 
                 output_folder = 'output',
                 overwrite_output = False,
                 remove_html_ext = True,
                 overwrite_container = False,
                 account_name = None, 
                 account_key = None, 
                 container_name = DEVSTORE_CONTAINER_NAME):
        '''
        Constructor function. Creates a new container for blobs under the specified account.
        If the container with the same name already exists, delete it if overwrite_container is true.

        input_folder: The folder contains all the resources of the static website.
        output_folder: The folder contains all the resources uploaded.
        overwrite_output: Overwrites the output_folder anyway.
        remove_html_ext: Removes the .htm/.html in the url.
        overwrite_container: Deletes the existing container.
        account_name: Optional. Your storage account name, DEVSTORE_ACCOUNT_NAME is used if None.
        account_key: Optional. Your storage account key, DEVSTORE_ACCOUNT_KEY is used if None.
        container_name: Optional. Container name, DEVSTORE_CONTAINER_NAME is used if None.
        '''
        # Paths are normalized to absolute + lowercase so later prefix
        # stripping and comparisons are case-insensitive.
        self.input_folder = os.path.abspath(input_folder).lower()
        self.output_folder = os.path.abspath(output_folder).lower()
        self.overwrite_output = overwrite_output
        self.remove_html_ext = remove_html_ext
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name

        # Map every file on disk to its blob name up front.
        self.full_path_blob_name_dict = self.list_full_path_with_blob_name()

        # Legacy-SDK switch: without credentials, target the local storage
        # emulator (DEVSTORE_* defaults) instead of a real account.
        if not account_name or not account_key:
            os.environ['EMULATED'] = 'true'
        else:
            os.environ['EMULATED'] = 'false'
 
        self.blob_service = BlobService(self.account_name, self.account_key)
        if overwrite_container:
            # NOTE(review): Azure deletes containers asynchronously; the
            # create below can hit a conflict if it runs too soon after.
            self.blob_service.delete_container(container_name)
            
        self.blob_service.create_container(container_name, x_ms_blob_public_access = 'container')
Пример #17
0
 def __init__(self,
               account_name=None,
               account_key=None,
               protocol='https',
               container='logs',
               zip_compression=False,
               max_connections=1,
               max_retries=5,
               retry_wait=1.0):
     """Blob-shipping handler state: Azure connection, target container
     name, and upload/retry options.

     `container` may contain %-style placeholders filled from hostname
     (with '_' swapped for '-'), pid and userid, and is lowercased to
     satisfy Azure container-naming rules.
     """
     self.service = BlobService(account_name, account_key, protocol)
     # The container is created lazily on the first upload.
     self.container_created = False
     hostname = gethostname()
     self.meta = {'hostname': hostname.replace('_', '-'),
                  'process': os.getpid(),
                  'userid':'1'}
     self.container = (container % self.meta).lower()
     # Restore the real hostname for any later, non-container use of meta.
     self.meta['hostname'] = hostname
     self.zip_compression = zip_compression
     self.max_connections = max_connections
     self.max_retries = max_retries
     self.retry_wait = retry_wait
Пример #18
0
    def __get_available_storage_account_and_container(self, hackathon_id):
        """
        Get available storage account and container

        :param hackathon_id: the id of hackathon
        :type hackathon_id: integer

        :return: if there is available storage account and container, then return (True, storage
                 account name, container name). Otherwise, return (False, None, None)
        :rtype: 3-element tuple: (bool, str|unicode, str|unicode)
        """
        container_name = self.util.safe_get_config('dockerhostserver.azure.container', 'dockerhostprivatecontainer')
        sms = self.__get_sms_object(hackathon_id)
        if sms is None:
            self.log.error('Something wrong with Azure account of Hackathon:%d' % hackathon_id)
            return False, None, None
        storage_accounts = sms.list_storage_accounts()
        # check storage account one by one, return True once find a qualified one
        for storage in storage_accounts.storage_services:
            try:
                storage_response = sms.get_storage_account_keys(storage.service_name)
            except Exception as e:
                # Keys unavailable for this account -- log it and move on.
                self.log.error('Encounter an error when checking storage_account:%s ' % storage.service_name)
                self.log.error(e)
                continue
            # The host_base default targets Azure China; override via config
            # for other clouds.
            blob_service = BlobService(account_name=storage.service_name,
                                       account_key=storage_response.storage_service_keys.primary,
                                       host_base=self.util.safe_get_config('dockerhostserver.storage.host_base',
                                                                           '.blob.core.chinacloudapi.cn'))
            try:
                # Cheap existence probe: the metadata fetch succeeds only if
                # the container already exists.
                blob_service.get_container_metadata(container_name)
                return True, storage.service_name, container_name
            except Exception as e:
                # NOTE(review): e.message is Python-2-only; under Python 3
                # this comparison would raise AttributeError.
                if e.message != AzureApiExceptionMessage.CONTAINER_NOT_FOUND:
                    self.log.error('Encounter an error when checking container:%s ' % container_name)
                    self.log.error(e)
                    continue
            # Container is simply missing: try to create it publicly
            # readable on this account.
            try:
                blob_service.create_container(container_name=container_name, x_ms_blob_public_access='container')
                return True, storage.service_name, container_name
            except Exception as e:
                self.log.error('Encounter an error when creating container:%s ' % container_name)
                self.log.error(e)
        return False, None, None
Пример #19
0
 def connect(self, creds):
     """Build and return an azure BlobService for the given credentials,
     always over HTTPS."""
     service = BlobService(account_name=creds.account_name,
                           account_key=creds.account_key,
                           protocol='https')
     return service
Пример #20
0
 def service(self):
     """Lazily construct and cache the BlobService client."""
     if self._service is None:
         self._service = BlobService(
             self.AZURE_ACCOUNT_NAME,
             self.AZURE_ACCOUNT_KEY,
             self.AZURE_PROTOCOL,
             self.AZURE_DOMAIN,
         )
     return self._service
Пример #21
0
class SitesToAzureBlob:
    """
    Uploads a static website located in a folder to the Windows Azure Blob
    storage service, optionally rewriting internal links so .htm/.html
    extensions disappear from URLs.
    """
    def __init__(self, 
                 input_folder, 
                 output_folder = 'output',
                 overwrite_output = False,
                 remove_html_ext = True,
                 overwrite_container = False,
                 account_name = None, 
                 account_key = None, 
                 container_name = DEVSTORE_CONTAINER_NAME):
        '''
        Constructor function. Creates a new container for blobs under the specified account.
        If the container with the same name already exists, delete it if overwrite_container is true.

        input_folder: The folder contains all the resources of the static website.
        output_folder: The folder contains all the resources uploaded.
        overwrite_output: Overwrites the output_folder anyway.
        remove_html_ext: Removes the .htm/.html in the url.
        overwrite_container: Deletes the existing container.
        account_name: Optional. Your storage account name, DEVSTORE_ACCOUNT_NAME is used if None.
        account_key: Optional. Your storage account key, DEVSTORE_ACCOUNT_KEY is used if None.
        container_name: Optional. Container name, DEVSTORE_CONTAINER_NAME is used if None.
        '''
        # Paths are normalized to absolute + lowercase so later prefix
        # stripping and comparisons are case-insensitive.
        self.input_folder = os.path.abspath(input_folder).lower()
        self.output_folder = os.path.abspath(output_folder).lower()
        self.overwrite_output = overwrite_output
        self.remove_html_ext = remove_html_ext
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name

        # Map every file on disk to its blob name up front.
        self.full_path_blob_name_dict = self.list_full_path_with_blob_name()

        # Legacy-SDK switch: without credentials, target the local storage
        # emulator (DEVSTORE_* defaults) instead of a real account.
        if not account_name or not account_key:
            os.environ['EMULATED'] = 'true'
        else:
            os.environ['EMULATED'] = 'false'

        self.blob_service = BlobService(self.account_name, self.account_key)
        if overwrite_container:
            # NOTE(review): Azure deletes containers asynchronously; the
            # create below can hit a conflict if it runs too soon after.
            self.blob_service.delete_container(container_name)

        self.blob_service.create_container(container_name, x_ms_blob_public_access = 'container')

    def upload_files_to_blob(self):
        '''
        Uploads every file in full_path_blob_name_dict to the blob container,
        skipping files whose previously-uploaded copy is identical (unless
        overwrite_output is set). HTML files get their internal links
        rewritten first.
        '''
        curdir = os.getcwd()

        for full_path, blob_name in self.full_path_blob_name_dict.iteritems():
            output_path = os.path.join(self.output_folder, blob_name)
            if not os.path.exists(os.path.dirname(output_path)):
                os.makedirs(os.path.dirname(output_path))
            # Skip unchanged files: the output folder mirrors what was
            # uploaded last time.
            if self.overwrite_output is False and os.path.exists(output_path):
                if filecmp.cmp(full_path, output_path):
                    print(blob_name + ' skips...')
                    continue

            print(blob_name + ' is uploading...')

            file_name, ext = os.path.splitext(blob_name)

            # Fix: close the file handle (the original leaked it via
            # open(...).read() with no close).
            with open(full_path, 'rb') as source:
                file_blob = source.read()
            content_type = self.fetch_content_type(ext)

            if ext == '.htm' or ext == '.html':
                if self.remove_html_ext:
                    blob_name = file_name
                # adjust_url_links resolves relative hrefs against the
                # file's own directory, so chdir there first; curdir is
                # restored after the loop.
                os.chdir(os.path.split(full_path)[0])
                file_blob = self.adjust_url_links(file_blob)

            self.blob_service.put_blob(self.container_name, blob_name, file_blob, x_ms_blob_type = 'BlockBlob', x_ms_blob_content_type = content_type)
            shutil.copy(full_path, os.path.dirname(output_path))

        os.chdir(curdir)
        print('Done')

    def list_full_path_with_blob_name(self):
        '''
        Walk input_folder and map each file's absolute path to its blob
        name (with '/' separators).
        '''
        # Fix: the original named this `dict`, shadowing the builtin.
        mapping = {}
        for root, dirs, files in os.walk(self.input_folder):
            for fi in files:
                full_path = os.path.abspath(os.path.join(root, fi)).lower()
                blob_name = self.list_blob_name(full_path)
                # Replace the Windows backslash in blob names with '/'.
                mapping[full_path] = blob_name.replace('\\', '/')
        return mapping

    def url_rep(self, matchobj):
        '''
        Substitution callback for pattern href|src=['"]?([^'" >]+): strips
        the .htm/.html extension from links that point at uploaded files.
        '''
        url_blob_name = self.list_blob_name(os.path.abspath(matchobj.group(2))).replace('\\', '/')
        if url_blob_name in self.full_path_blob_name_dict.values():
            file_name, ext = os.path.splitext(matchobj.group(2))
            # Fix: the original `a and b or c` parsed as `(a and b) or c`,
            # stripping '.htm' even when remove_html_ext was False.
            if self.remove_html_ext and (ext == '.html' or ext == '.htm'):
                return matchobj.group(1) + r'="' + file_name + '"'
            else:
                return matchobj.group(0)
        else:
            return matchobj.group(0)

    def adjust_url_links(self, file_content):
        '''
        Adjusts the urls in href and src attributes.
        Removes the .html/.htm extension of the linked html files in the file_content if needed.

        file_content: the content of the html file
        '''
        file_content = re.sub(r'(href|src)=[\'"]?([^\'" >]+)', self.url_rep, file_content)
        # (An HTML-parser rewrite was rejected here: BeautifulSoup cannot
        # preserve '<'/'>' inside <script type="text/template"> blocks.)
        return file_content

    def list_blob_name(self, full_path):
        '''
        Gets the file path name in the input_folder for blob storage.
        If we uploaded from a subfolder (such as /search), we must rename blobs to have the 'folder/' prefix in their name.
        For example, if we uploaded index.html from search subfolder, rename the blob from 'index.html' to 'search/index.html'.
        '''
        name = full_path.lower()
        name = name.replace(self.input_folder, '')
        # Drop the single leading separator left over from the prefix strip.
        if re.match('[A-Za-z0-9_-]', name[0]) is None:
            name = name[1:]
        return name

    def fetch_content_type(self, extension_name):
        '''
        Fetches the content type from the extension name; None when the
        extension is unknown.
        '''
        return {
            '.png': 'image/png',
            '.gif': 'image/gif',
            '.jpg': 'image/jpg',
            '.jpeg': 'image/jpeg',
            '.mp3': 'audio/mp3',
            '.jar': 'application/java-archive',
            '.zip': 'application/zip',
            '.htm': 'text/htm',
            '.html': 'text/html',
            '.js': 'application/javascript',
            '.txt': 'text/plain',
            '.css': 'text/css',
            '.xml': 'text/xml',
            '.pdf': 'application/pdf',
            '.json': 'application/json',
            }.get(extension_name, None)
Пример #22
0
class BlobStorageTimedRotatingFileHandler(TimedRotatingFileHandler):
    """
    Handler for logging to a file, rotating the log file at certain timed
    intervals.

    The outdated log file is shipped to the specified Windows Azure Storage
    blob container and removed from the local file system immediately.
    """

    def __init__(self,
                 filename,
                 when='h',
                 interval=1,
                 encoding=None,
                 delay=False,
                 utc=False,
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 container='logs',
                 ):
        # `filename` and `container` may contain %-style placeholders that
        # are expanded from self.meta ('hostname' and 'process').
        hostname = gethostname()
        self.meta = {'hostname': hostname, 'process': os.getpid()}
        s = super(BlobStorageTimedRotatingFileHandler, self)
        # backupCount=1: the base class keeps a single rotated file locally;
        # getFilesToDelete() ships it to blob storage before the base class
        # removes it during rollover.
        s.__init__(filename % self.meta,
                   when=when,
                   interval=interval,
                   backupCount=1,
                   encoding=encoding,
                   delay=delay,
                   utc=utc)
        self.service = BlobService(account_name, account_key, protocol)
        # Container creation is deferred until the first upload.
        self.container_created = False
        # Substitute '-' for '_' only while expanding the container name
        # (presumably because '_' is not valid in Azure container names --
        # confirm against the storage naming rules), then restore the real
        # hostname for use in emit().
        self.meta['hostname'] = hostname.replace('_', '-')
        container = container % self.meta
        self.container = container.lower()
        self.meta['hostname'] = hostname

    def _put_log(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        # Create the container lazily, once per handler lifetime.
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        # NOTE(review): reads the whole rotated file into memory before
        # uploading -- fine for typical log sizes, verify for long intervals.
        with open(os.path.join(dirName, fileName), mode='rb') as f:
            self.service.put_blob(self.container,
                                  fileName,
                                  f.read(),
                                  'BlockBlob',
                                  x_ms_blob_content_type='text/plain',
                                  )

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover().
        """
        # Expose the hostname to formatters as %(hostname)s.
        record.hostname = self.meta['hostname']
        super(BlobStorageTimedRotatingFileHandler, self).emit(record)

    def getFilesToDelete(self):
        """
        Determine the files to delete when rolling over.
        """
        dirName, baseName = os.path.split(self.baseFilename)
        fileNames = os.listdir(dirName)
        result = []
        prefix = baseName + "."
        plen = len(prefix)
        for fileName in fileNames:
            if fileName[:plen] == prefix:
                suffix = fileName[plen:]
                # self.extMatch is the timestamp regex compiled by the base
                # TimedRotatingFileHandler for the configured `when`.
                if self.extMatch.match(suffix):
                    # Upload first; the base class deletes every path in
                    # `result` as part of rollover.
                    self._put_log(dirName, fileName)
                    result.append(os.path.join(dirName, fileName))
        # delete the stored log file from the local file system immediately
        return result
Example #23
0
 def service(self):
     """Lazily construct and cache the BlobService client for this object."""
     cached = self._service
     if cached is not None:
         return cached
     self._service = BlobService(
         account_name=self.account_name,
         account_key=self.account_key,
         protocol=self.protocol,
     )
     return self._service
Example #24
0
 def create_blob_service(self):
     """Build and return a new BlobService client from this object's credentials."""
     service = BlobService(self.account_name, self.account_key)
     return service
Example #25
0
    def createInstances(self, sms, name, region, imageID, instanceSize, pkfile,
                        count, thumbprint, cert_data, num_instances,
                        certPasswd, storagename, master_size):
        '''
        Provision `count` Linux VM deployments on Azure.

        Creates an affinity group, storage account and blob container, makes
        sure the OS image `imageID` exists (copying the VHD from VM Depot and
        registering it if the lookup fails), then for each VM creates a cloud
        service, uploads the service certificate and starts a deployment,
        blocking until the role reaches 'ReadyRole'.

        :param sms: Azure ServiceManagementService client.
        :param name: base hostname / deployment name for the VMs.
        :param region: Azure region for the affinity group.
        :param imageID: OS image name to deploy from.
        :param instanceSize: NOTE(review): unused -- `master_size` is what is
            passed as role_size; confirm this is intentional.
        :param pkfile: path to the SSH public key file on the VM.
        :param thumbprint: certificate thumbprint for SSH key auth.
        :param cert_data: service certificate (pfx) payload.
        :param num_instances: NOTE(review): unused in this method.
        :param certPasswd: password of the pfx certificate.
        :param storagename: base name for affinity group / storage account.
        :param master_size: role size used for every deployment.
        :return: list alternating "<service>.cloudapp.net" DNS names and the
            container name; exits the process on deployment conflict.
        '''
        hostname = name
        mainPath = os.path.dirname(os.path.abspath(__file__))
        # NOTE(review): username looks scrubbed ("******"); the real VM admin
        # user name must be supplied for deployments to work.
        username = "******"
        password = None
        # Affinity group and storage account share the same base name/region.
        affGrp = self.createAffinityGrp(sms, storagename, storagename,
                                        storagename, region)
        storage_name = self.StorageCreate(sms, storagename, storagename,
                                          storagename, region, affGrp)
        account_key = self.getStorageAccountKey(sms, storage_name)
        permission = None
        container_name = self.get_vm_name()
        account_name = storage_name
        container_name = self.create_containers(sms, storage_name, account_key,
                                                container_name.lower(), None)
        # Give the newly created container a moment to become available.
        time.sleep(5)
        #print "Container Name:"+disks
        medialink = "http://" + storage_name + ".blob.core.windows.net/" + container_name + "/"
        # --------------------------------------------------------
        blobService = BlobService(account_name=storage_name,
                                  account_key=account_key)
        blobName = container_name + "blob.vhd"
        # If the OS image lookup fails, copy the VHD from VM Depot and
        # register it as an OS image.
        try:
            image = sms.get_os_image(imageID)
        except:
            # NOTE(review): bare except treats ANY failure (auth, network) as
            # "image missing" -- consider catching the specific error.
            if (self.copy_image_vmDepot(blobService, container_name, blobName,
                                        imageID, sms)):
                print "INFO -- The Disk Blob has been copied"
            media_link = "http://" + storage_name + ".blob.core.windows.net/" + container_name + "/" + blobName
            if (self.make_os_image(sms, media_link, imageID)):
                print "INFO -- The image '" + imageID + "' is ready now!!"

            else:
                print "Error: Can not complete creating The image"
                # NOTE(review): exit(0) reports success to the shell on this
                # failure path -- a nonzero code would be more accurate.
                exit(0)

        #-----------------------------------------------------------
        medialink = "http://" + storage_name + ".blob.core.windows.net/" + container_name + "/"
        media_link = ""
        # Configuring EndPoint "Firwall Configuration":
        endpoint_config = ConfigurationSet()
        endpoint_config.configuration_set_type = 'NetworkConfiguration'
        # Public TCP endpoint 5000 -> 5000 (named 'XML').
        endpoint1 = ConfigurationSetInputEndpoint(
            name='XML',
            protocol='tcp',
            port='5000',
            local_port='5000',
            load_balanced_endpoint_set_name=None,
            enable_direct_server_return=False)
        endpoint_config.input_endpoints.input_endpoints.append(endpoint1)
        self.logger.info(
            "Configuring EndPoints 'Firwall Configuration' SHH, PBS Torque and OpenMPI ports"
        )
        # Linux VM Configuration:
        linux_config = LinuxConfigurationSet(hostname, username, password,
                                             True)
        publickey = PublicKey(thumbprint, pkfile)
        linux_config.ssh.public_keys.public_keys.append(publickey)
        self.logger.info("Linux VM Configuration")
        # Configuring Image ID:
        #----------------------
        os_hd = OSVirtualHardDisk(imageID, media_link)
        self.logger.info(
            "Configuring The Virtual Hard Disk using Image ID: %s", imageID)

        # Start Deployment of VM on Azure:
        self.logger.info("Start Deployment of Elastic hpc on Azure")

        # Configuring Certificates:
        cert_format = 'pfx'
        cert_password = certPasswd
        VMname = hostname
        vmname = hostname
        instances = []

        try:
            for num in range(count):
                name = vmname + str(num)
                vname = vmname
                Service_name = vname
                Service_url = self.newCloudService(sms, Service_name,
                                                   Service_name, Service_name,
                                                   region, affGrp)
                # One public SSH endpoint per VM: external port 220<num>
                # maps to internal port 22.
                endpoint3 = ConfigurationSetInputEndpoint(
                    name='SSH' + str(num),
                    protocol='tcp',
                    port='220' + str(num),
                    local_port='22',
                    load_balanced_endpoint_set_name=None,
                    enable_direct_server_return=False)
                endpoint_config.input_endpoints.input_endpoints.append(
                    endpoint3)
                #endpoint4 = ConfigurationSetInputEndpoint(name="FTP", protocol='tcp', port='21', local_port='21', load_balanced_endpoint_set_name=None, enable_direct_server_return=False)
                #endpoint_config.input_endpoints.input_endpoints.append(endpoint4)
                #endpoint5 = ConfigurationSetInputEndpoint(name="FTP1", protocol='tcp', port='20', local_port='20', load_balanced_endpoint_set_name=None, enable_direct_server_return=False)
                #endpoint_config.input_endpoints.input_endpoints.append(endpoint5)
                #endpoint6 = ConfigurationSetInputEndpoint(name="FTPudp", protocol='udp', port='21', local_port='21', load_balanced_endpoint_set_name=None, enable_direct_server_return=False)
                #endpoint_config.input_endpoints.input_endpoints.append(endpoint6)
                #for i in range(6):
                #	endpointpasv = ConfigurationSetInputEndpoint(name="FTPpasv"+str(i), protocol='tcp', port='4000'+str(i), local_port='4000'+str(i), load_balanced_endpoint_set_name=None, enable_direct_server_return=False)
                #	endpoint_config.input_endpoints.input_endpoints.append(endpointpasv)
                pbs_endpoints = self.get_pbs_endpoints(0)
                for endpoint in pbs_endpoints:
                    endpoint_config.input_endpoints.input_endpoints.append(
                        endpoint)
                # NOTE(review): name[:-1] drops the trailing VM index from the
                # VHD name, so all VMs appear to share one media link --
                # confirm this is intentional.
                media_link = medialink + name[:-1] + ".vhd"
                self.logger.info("Configuring Media Link %s", media_link)
                # Configuring Image ID:
                #----------------------
                os_hd = OSVirtualHardDisk(imageID, media_link)
                self.logger.info(
                    "Configuring The Virtual Hard Disk using Image ID: %s",
                    imageID)
                self.logger.info("Deploying Node number: %d", num)
                result_cert = sms.add_service_certificate(
                    service_name=Service_name,
                    data=cert_data,
                    certificate_format=cert_format,
                    password=cert_password)
                self.logger.info("Start Deploying VM with Name: " + vname)
                # Some SDK results have no __dict__; fall back to repr.
                try:
                    self.logger.info(vars(result_cert))
                except:
                    self.logger.info(result_cert)
                time.sleep(5)
                result = sms.create_virtual_machine_deployment(
                    service_name=Service_name,
                    deployment_name=vname,
                    deployment_slot='production',
                    label=Service_name,
                    role_name=vname,
                    system_config=linux_config,
                    os_virtual_hard_disk=os_hd,
                    network_config=endpoint_config,
                    role_size=master_size)
                #role_size="Large")
                self.logger.info("Start Deployment")
                # Block until Azure reports the async op, the deployment and
                # the role instance as fully ready.
                self.wait_for_async(sms, result.request_id)
                self.wait_for_deployment_status(sms, Service_name, vname,
                                                'Running')
                self.wait_for_role_instance_status(sms, Service_name, vname,
                                                   vname, 'ReadyRole')
                instances.append(Service_name + ".cloudapp.net")
                instances.append(container_name)
        except WindowsAzureConflictError:
            self.logger.info("Error: Can not Create VM")
            sys.exit(1)
        return instances
Example #26
0
import logging
from azure.storage.blobservice import BlobService
from azure.storage.storageclient import _StorageClient
from azure.servicemanagement import LinuxConfigurationSet
from azure.servicemanagement import OSVirtualHardDisk
from azure import WindowsAzureConflictError
from azure import WindowsAzureError
from azure import *
from azure.servicemanagement import *
from azure.servicemanagement.servicemanagementservice import ServiceManagementService
import base64
import urllib2

# WARNING(security): storage-account credentials are hard-coded below.
# Rotate this key and load credentials from configuration or environment
# variables instead of committing them to source control.
blob_service = BlobService(
    account_name='sjsssiohdsiu',
    account_key=
    'tBlxaMOoA+bE6zMsT5i1epphb25V/sD62MpAO7UA0fRK0GbpYLiVwhpe+WIwCtg80XxC+1uaVDtOvopZRHQ3Nw=='
)
#myblob = open(r'vmdepoteastus.blob.core.windows.net/linux-community-store/community-23970-525c8c75-8901-4870-a937-7277414a6eaa-1.vhd', 'r').read()
#blob_service.put_blob('mycontainerazure','ahmedblob.vhd',myblob,x_ms_blob_type='BlockBlob')
#cert_data_path = "/media/ahmed/1578-4B0E/WindowsAzure/AzureCert/mycert.pfx"
#with open(cert_data_path, "rb") as bfile:
#    cert_data = base64.b64encode(bfile.read())

# Root logger: everything at DEBUG and above goes to stdout.
log = logging.getLogger()
log.setLevel(logging.DEBUG)
# NOTE(review): `sys` is not imported explicitly above -- presumably pulled
# in via `from azure import *`; confirm, or add an explicit `import sys`.
ch = logging.StreamHandler(sys.__stdout__)  # Add this
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')