def upload_file(self, file_path):
    """
    Ensure the 'script' container exists on the storage account.

    Connects to blob storage with this object's credentials and creates a
    container named 'script' with 'blob'-level public read access, so its
    blobs are fetchable by plain URL. Creation failures (typically: the
    container already exists) are logged at warning level and otherwise
    ignored, best-effort style.

    :param file_path: path of the file to upload.
        NOTE(review): the visible body never uses file_path and performs no
        actual upload -- only container creation; confirm the remainder of
        this routine was not lost.
    """
    blob_service = BlobService(self.storage_account, self.account_key)
    try:
        blob_service.create_container("script", x_ms_blob_public_access='blob')
    except Exception as e:
        # Fix: replaced the obsolete `except Exception, e` comma syntax and
        # dropped the redundant trailing `pass`. Most likely the container
        # already exists; log and carry on.
        self.logger.warn(e.message)
class AzureStorage(Storage):
    """
    Django file-storage backend backed by Windows Azure Blob Storage
    (legacy ``azure`` SDK ``BlobService`` API).

    Credentials and the target container name are read from Django
    ``settings`` at class-definition time.
    """
    # Storage account credentials and container, taken from Django settings.
    account_name = settings.AZURE_ACCOUNT_NAME
    account_key = settings.AZURE_ACCOUNT_KEY
    azure_container = settings.AZURE_CONTAINER

    def __init__(self, *args, **kwargs):
        super(AzureStorage, self).__init__(*args, **kwargs)
        # Lazily-created BlobService; see the ``connection`` property.
        self._connection = None

    @property
    def connection(self):
        """Return the BlobService, creating it (and the container) on first use."""
        if self._connection is None:
            # Create connection
            self._connection = BlobService(self.account_name, self.account_key)
            # Create container if needed
            containers = [c for c in self._connection.list_containers(prefix=self.azure_container) if c.name == self.azure_container ]
            if len(containers) == 0:
                # NOTE(review): fail_on_exist=True will raise if another
                # process creates the container between the list_containers
                # check above and this call -- confirm that race is acceptable.
                self._connection.create_container(self.azure_container, {'origin': 'created by Django web app'}, fail_on_exist=True)
        return self._connection

    def _open(self, name, mode="rb"):
        """Download blob *name* and return it as a seekable in-memory file."""
        stream = SimpleUploadedFile(name, None)
        self.connection.get_blob_to_file(self.azure_container, name, stream)
        # Rewind so callers can read from the beginning.
        stream.seek(0)
        return stream

    def exists(self, name):
        """Return True if a blob called *name* exists in the container."""
        try:
            self.connection.get_blob_properties(self.azure_container, name)
        except WindowsAzureMissingResourceError:
            return False
        else:
            return True

    def delete(self, name):
        """Delete blob *name* from the container."""
        self.connection.delete_blob(self.azure_container, name)

    def size(self, name):
        """Return the blob's size taken from its Content-Length property.

        NOTE(review): header-derived properties are typically strings; callers
        expecting an int may need a conversion -- confirm against usage.
        """
        properties = self.connection.get_blob_properties(self.azure_container, name)
        return properties["content-length"]

    def _save(self, name, content):
        """Upload *content* as a block blob called *name*; return the name."""
        self.connection.put_block_blob_from_file(self.azure_container, name, content)
        return name

    def url(self, name):
        """Return the blob URL with an appended shared-access signature.

        The SAS window runs from 10 minutes in the past (clock-skew slack)
        to 10 minutes in the future, read-only ('r').
        """
        ap = AccessPolicy(expiry=(timezone.datetime.utcnow() + timezone.timedelta(seconds=600)).strftime('%Y-%m-%dT%H:%M:%SZ'), \
                          start=(timezone.datetime.utcnow() + timezone.timedelta(seconds=-600)).strftime('%Y-%m-%dT%H:%M:%SZ'), \
                          permission='r')
        sap = SharedAccessPolicy(ap)
        sas = SharedAccessSignature(self.account_name, self.account_key)
        url = sas.generate_signed_query_string(path=self.azure_container + '/' + name, resource_type='b', shared_access_policy=sap)
        # _convert_query_string is a private SDK helper that serializes the
        # signed query parameters produced above.
        return self.connection.make_blob_url(self.azure_container, name) + "?" + sas._convert_query_string(url)
class _BlobStorageFileHandler(object):
    """
    Helper that ships rotated log files to a Windows Azure Storage blob
    container (legacy ``azure`` SDK), optionally zip-compressing them first.

    The container name may be a %-format template expanded against
    ``{'hostname': ..., 'process': ..., 'userid': ...}``.
    """

    def __init__(self, account_name=None, account_key=None, protocol='https',
                 container='logs', zip_compression=False, max_connections=1,
                 max_retries=5, retry_wait=1.0):
        self.service = BlobService(account_name, account_key, protocol)
        # Container creation is deferred until the first upload.
        self.container_created = False
        hostname = gethostname()
        # Substitute '-' for '_' while expanding the container-name template
        # (Azure container names may not contain underscores), then lowercase.
        self.meta = {'hostname': hostname.replace('_', '-'), 'process': os.getpid(), 'userid': '1'}
        self.container = (container % self.meta).lower()
        # Restore the real hostname for any later use of meta.
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.

        When zip_compression is on, the file is first zipped into a
        temporary file and uploaded with a '.zip' suffix; the temp file is
        removed afterwards.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                # Upload the compressed temp file instead of the original.
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.put_block_blob_from_path(self.container,
                                                  fileName + suffix,
                                                  file_path,
                                                  x_ms_blob_content_type=content_type,
                                                  max_connections=self.max_connections,
                                                  max_retries=self.max_retries,
                                                  retry_wait=self.retry_wait)
        finally:
            # NOTE(review): the truthiness test on fd would skip cleanup if
            # mkstemp ever returned descriptor 0; `fd is not None` would be
            # safer -- confirm before changing.
            if self.zip_compression and fd:
                os.remove(tmpfile_path)
def create_containers(self, sms, storage_account, storage_key, container_name, permission):
    """
    Create a blob container on the given storage account.

    :param sms: service-management client (unused in the visible body; kept
        for interface compatibility with callers).
    :param storage_account: storage account name.
    :param storage_key: storage account access key.
    :param container_name: name of the container to create.
    :param permission: public-access level passed straight through to
        ``create_container``.
    :returns: the container name on success.
    Exits the process with status 1 if a container of that name already
    exists (WindowsAzureConflictError).
    """
    try:
        self.logger.info("Storage account: " + storage_account)
        self.logger.info("Container Name: " + container_name)
        self.logger.info("permission: " + str(permission))
        blob_service = BlobService(account_name=storage_account, account_key=storage_key)
        self.logger.info("creating Blob Service connection")
        # Fix: announce the creation BEFORE performing it, so the log is
        # truthful if create_container raises.
        self.logger.info("creating container: %s", container_name)
        blob_service.create_container(container_name, None, permission, False)
        return container_name
    except WindowsAzureConflictError:
        # Fix: this fatal condition was previously logged at INFO level.
        self.logger.error(
            "Error: can not create storage container with name %s ",
            container_name)
        sys.exit(1)
def __get_available_storage_account_and_container(self, hackathon_id):
    """ Get available storage account and container

    :param hackathon_id: the id of hackathon
    :type hackathon_id: integer

    :return: if there is available storage account and container, then return (True, storage account name, container
        name). Otherwise, return (False, None, None)
    :rtype: 3-element tuple: (bool, str|unicode, str|unicode)
    """
    container_name = self.util.safe_get_config('dockerhostserver.azure.container', 'dockerhostprivatecontainer')
    sms = self.__get_sms_object(hackathon_id)
    if sms is None:
        self.log.error('Something wrong with Azure account of Hackathon:%d' % hackathon_id)
        return False, None, None
    storage_accounts = sms.list_storage_accounts()
    # check storage account one by one, return True once find a qualified one
    for storage in storage_accounts.storage_services:
        # Need the account key to talk to the blob endpoint; skip accounts
        # whose keys cannot be retrieved.
        try:
            storage_response = sms.get_storage_account_keys(storage.service_name)
        except Exception as e:
            self.log.error('Encounter an error when checking storage_account:%s ' % storage.service_name)
            self.log.error(e)
            continue
        # host_base defaults to the Azure China blob endpoint suffix.
        blob_service = BlobService(account_name=storage.service_name,
                                   account_key=storage_response.storage_service_keys.primary,
                                   host_base=self.util.safe_get_config('dockerhostserver.storage.host_base',
                                                                       '.blob.core.chinacloudapi.cn'))
        # If the container already exists on this account, use it as-is.
        try:
            blob_service.get_container_metadata(container_name)
            return True, storage.service_name, container_name
        except Exception as e:
            # Any error other than "container not found" disqualifies this
            # account; "not found" falls through to the creation attempt.
            if e.message != AzureApiExceptionMessage.CONTAINER_NOT_FOUND:
                self.log.error('Encounter an error when checking container:%s ' % container_name)
                self.log.error(e)
                continue
        # Container is missing: try to create it with container-level
        # public access; on failure, move on to the next account.
        try:
            blob_service.create_container(container_name=container_name, x_ms_blob_public_access='container')
            return True, storage.service_name, container_name
        except Exception as e:
            self.log.error('Encounter an error when creating container:%s ' % container_name)
            self.log.error(e)
    # No storage account could provide (or create) the container.
    return False, None, None
def upload_block_blob(self, container_name, blob_name, file_path, storage_account, storage_key): result = self.list_storage_container(storage_account, storage_key) found = False blob_service = BlobService(account_name=storage_account, account_key=storage_key) for name in result: if name == container_name: found = True if found: self.logger.info("container is already exist") else: blob_service.create_container(container_name, None, None, False) blob_service.put_blob(container_name, blob_name, '', 'BlockBlob') data_sent = 0 sent = 0 block_ids = [] index = 0 with open(file_path, 'rb') as f: while True: data = f.read(self.chunk_size) if data: length = len(data) block_id = base64.b64encode(str(index)) blob_service.put_block(container_name, blob_name, data, block_id) block_ids.append(block_id) index += 1 data_sent += self.chunk_size sent = data_sent / (1024 * 1024) sys.stdout.write("\rUploaded data = %d MB" % sent) sys.stdout.flush() else: print "\n" break blob_service.put_block_list(container_name, blob_name, block_ids)
class BlobStorageTimedRotatingFileHandler(TimedRotatingFileHandler):
    """
    Handler for logging to a file, rotating the log file at certain timed
    intervals.

    The outdated log file is shipped to the specified Windows Azure Storage
    blob container and removed from the local file system immediately.
    The filename and container name may be %-format templates expanded
    against ``{'hostname': ..., 'process': ...}``.
    """
    def __init__(self,
                 filename,
                 when='h',
                 interval=1,
                 encoding=None,
                 delay=False,
                 utc=False,
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 container='logs',
                 ):
        hostname = gethostname()
        self.meta = {'hostname': hostname, 'process': os.getpid()}
        s = super(BlobStorageTimedRotatingFileHandler, self)
        # backupCount is forced to 1: rotated files are shipped to blob
        # storage and deleted locally by getFilesToDelete().
        s.__init__(filename % self.meta,
                   when=when,
                   interval=interval,
                   backupCount=1,
                   encoding=encoding,
                   delay=delay,
                   utc=utc)
        self.service = BlobService(account_name, account_key, protocol)
        # Container creation is deferred until the first shipped file.
        self.container_created = False
        # Expand the container template with '-' instead of '_' (Azure
        # container name restriction), lowercase it, then restore the real
        # hostname so emit() records the true value.
        self.meta['hostname'] = hostname.replace('_', '-')
        container = container % self.meta
        self.container = container.lower()
        self.meta['hostname'] = hostname

    def _put_log(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        with open(os.path.join(dirName, fileName), mode='rb') as f:
            self.service.put_blob(self.container,
                                  fileName,
                                  f.read(),
                                  'BlockBlob',
                                  x_ms_blob_content_type='text/plain',
                                  )

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover().
        """
        # Make the hostname available to format strings on every record.
        record.hostname = self.meta['hostname']
        super(BlobStorageTimedRotatingFileHandler, self).emit(record)

    def getFilesToDelete(self):
        """
        Determine the files to delete when rolling over.

        Each rotated file matching the base class's rollover pattern is
        first shipped to blob storage, then returned so the base class
        removes it from the local file system.
        """
        dirName, baseName = os.path.split(self.baseFilename)
        fileNames = os.listdir(dirName)
        result = []
        prefix = baseName + "."
        plen = len(prefix)
        for fileName in fileNames:
            if fileName[:plen] == prefix:
                suffix = fileName[plen:]
                # extMatch is the rollover-suffix regex from the base class.
                if self.extMatch.match(suffix):
                    self._put_log(dirName, fileName)
                    result.append(os.path.join(dirName, fileName))
        # delete the stored log file from the local file system immediately
        return result
class SitesToAzureBlob: """ Class contains functions to upload a static website, which is loacated in a folder, to Windows Azure Blob storage service. """ def __init__(self, input_folder, output_folder = 'output', overwrite_output = False, remove_html_ext = True, overwrite_container = False, account_name = None, account_key = None, container_name = DEVSTORE_CONTAINER_NAME): ''' Constructor function. Creates a new container for blobs under the specified account. If the container with the same name already exists, delete it if overwrite_container is true. input_folder: The folder contains all the resources of the static website. output_folder: The folder contains all the resources uploaded. overwrite_output: Overwrites the output_folder anyway. remove_html_ext: Removes the .htm/.html in the url. overwrite_container: Deletes the existing container. account_name: Optional. Your storage account name, DEVSTORE_ACCOUNT_NAME is used if None. account_key: Optional. Your storage account key, DEVSTORE_ACCOUNT_KEY is used if None. container_name: Optional. Container name, DEVSTORE_CONTAINER_NAME is used if None. ''' self.input_folder = os.path.abspath(input_folder).lower() self.output_folder = os.path.abspath(output_folder).lower() self.overwrite_output = overwrite_output self.remove_html_ext = remove_html_ext self.account_name = account_name self.account_key = account_key self.container_name = container_name self.full_path_blob_name_dict = self.list_full_path_with_blob_name() if not account_name or not account_key: os.environ['EMULATED'] = 'true' else: os.environ['EMULATED'] = 'false' self.blob_service = BlobService(self.account_name, self.account_key) if overwrite_container: self.blob_service.delete_container(container_name) self.blob_service.create_container(container_name, x_ms_blob_public_access = 'container') def upload_files_to_blob(self): ''' Uploads the files to the blob. 
full_path_blob_name_dict: A dictionary whose key is the full_path of the file and the value is the blob_name. ''' #if self.remove_html_ext: # for blob_name in full_path_blob_name_dict.values(): # file_name, ext = os.path.splitext(blob_name) # if ext == '.html' or ext == '.htm': # self.html_blob_name_list.append(blob_name) curdir = os.getcwd() for full_path, blob_name in self.full_path_blob_name_dict.iteritems(): output_path = os.path.join(self.output_folder, blob_name) if not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path)) if self.overwrite_output is False and os.path.exists(output_path): if filecmp.cmp(full_path, output_path): print blob_name + ' skips...' continue print blob_name + ' is uploading...' file_name, ext = os.path.splitext(blob_name) file_blob = open(full_path, 'rb').read() content_type = self.fetch_content_type(ext) if ext == '.htm' or ext == '.html': if self.remove_html_ext: blob_name = file_name os.chdir(os.path.split(full_path)[0]) file_blob = self.adjust_url_links(file_blob) self.blob_service.put_blob(self.container_name, blob_name, file_blob, x_ms_blob_type = 'BlockBlob', x_ms_blob_content_type = content_type) shutil.copy(full_path, os.path.dirname(output_path)) os.chdir(curdir) print 'Done' def list_full_path_with_blob_name(self): ''' Fetches the full_path as key and blob_name as value into a dictionary. ''' dict = {} for root, dirs, files in os.walk(self.input_folder): for fi in files: full_path = os.path.abspath(os.path.join(root, fi)).lower() blob_name = self.list_blob_name(full_path) dict[full_path] = blob_name.replace('\\', '/') # To replace the Windows backslash \ in the blob_name with /. return dict def url_rep(self, matchobj): ''' This is called for every non-overlapping occurrence of pattern: href|src=[\'"]?([^\'" >]+). 
''' url_blob_name = self.list_blob_name(os.path.abspath(matchobj.group(2))).replace('\\', '/') if url_blob_name in self.full_path_blob_name_dict.values(): file_name, ext = os.path.splitext(matchobj.group(2)) if self.remove_html_ext and ext == '.html' or ext == '.htm': return matchobj.group(1) + r'="' + file_name + '"' else: return matchobj.group(0) else: return matchobj.group(0) def adjust_url_links(self, file_content): ''' Adjusts the urls in href and src attributes. Removes the .html/.htm extension of the linked html files in the file_content if needed. file_content: the content of the html file ''' file_content = re.sub(r'(href|src)=[\'"]?([^\'" >]+)', self.url_rep, file_content) ''' Problem with using BeautifulSoup. It cannot preserve the '<', '>' in the <script type="text/template"...> ''' #html = soup(file_content) #for tag in html.findAll('a', {'href': True}): # href = tag['href'] # if href in html_blob_name_list: # tag['href'] = os.path.splitext(href)[0] #return str(html) return file_content def list_blob_name(self, full_path): ''' Gets the file path name in the input_folder for blob storage. If we uploaded from a subfolder (such as /search), we must rename blobs to have the 'folder/' prefix in their name. For example, if we uploaded index.html from search subfolder, rename the blob from 'index.html' to 'search/index.html'. ''' name = full_path.lower() name = name.replace(self.input_folder, '') if re.match('[A-Za-z0-9_-]', name[0]) is None: name=name[1:] return name def fetch_content_type(self, extension_name): ''' Fetches the content type from the extension name. 
''' return { '.png': 'image/png', '.gif': 'image/gif', '.jpg':'image/jpg', '.jpeg':'image/jpeg', '.mp3':'audio/mp3', '.jar':'application/java-archive', '.zip': 'application/zip', '.htm': 'text/htm', '.html': 'text/html', '.js': 'application/javascript', '.txt': 'text/plain', '.css': 'text/css', '.xml':'text/xml', '.pdf':'application/pdf', '.json':'application/json' }.get(extension_name, None) # None is default if extensionName not found