def uri_put_file(creds, uri, fp, content_type=None):
    """Upload the full contents of *fp* to the 'wabs://' blob named by *uri*.

    *fp* must be positioned at the start of the stream.  Returns a _Key
    whose ``size`` attribute is the number of bytes consumed from *fp*,
    mirroring the S3 variant of this function.
    """
    assert fp.tell() == 0
    assert uri.startswith('wabs://')

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        # Retry-logging hook: explains why an upload attempt failed.
        # NOTE(review): not called within this block — presumably wired up
        # by retry machinery elsewhere; confirm before removing.
        def standard_detail_message(prefix=''):
            return (prefix + ' There have been {n} attempts to upload '
                    'file {url} so far.'.format(n=exc_processor_cxt, url=uri))

        exc_type, exc_value, exc_tb = exc_tup
        del exc_tup

        # Known-retryable case: socket errors.
        if issubclass(exc_type, socket.error):
            sock_msg = (exc_value[1] if isinstance(exc_value, tuple)
                        else exc_value)
            logger.info(
                msg='Retrying upload because of a socket error',
                detail=standard_detail_message(
                    "The socket error's message is '{0}'.".format(sock_msg)))
        else:
            # Anything else is unexpected: warn loudly but still retry.
            logger.warning(
                msg='retrying file upload from unexpected exception',
                detail=standard_detail_message(
                    'The exception type is {etype} and its value is '
                    '{evalue} and its traceback is {etraceback}'.format(
                        etype=exc_type, evalue=exc_value,
                        etraceback=''.join(traceback.format_tb(exc_tb)))))

        # Break the traceback reference cycle so GC can collect it.
        del exc_tb

    parsed = urlparse(uri)
    service = BlockBlobService(creds.account_name, creds.account_key,
                               sas_token=creds.access_token,
                               protocol='https')
    service.create_blob_from_bytes(
        parsed.netloc,
        parsed.path.lstrip('/'),
        fp.read(),
        content_settings=ContentSettings(content_type),
        validate_content=True)

    # Match the S3 implementation's return contract: an object exposing
    # only a 'size' attribute.
    return _Key(size=fp.tell())
def uri_put_file(creds, uri, fp, content_type=None):
    """Write everything in *fp* to the Azure (WABS) blob addressed by *uri*.

    The caller must hand over *fp* rewound to offset zero.  The return
    value is a _Key carrying a single 'size' attribute, for parity with
    the S3 counterpart of this helper.
    """
    assert fp.tell() == 0
    assert uri.startswith('wabs://')

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        # Invoked with a (type, value, traceback) triple when an upload
        # attempt fails; emits a retry log line.
        def standard_detail_message(prefix=''):
            return (prefix + ' There have been {n} attempts to upload '
                    'file {url} so far.'.format(
                        n=exc_processor_cxt, url=uri))

        typ, value, tb = exc_tup
        del exc_tup

        if not issubclass(typ, socket.error):
            # Untreated exception: surface full details as a warning and
            # let the retry loop have another go.
            logger.warning(
                msg='retrying file upload from unexpected exception',
                detail=standard_detail_message(
                    'The exception type is {etype} and its value is '
                    '{evalue} and its traceback is {etraceback}'.format(
                        etype=typ, evalue=value,
                        etraceback=''.join(traceback.format_tb(tb)))))
        else:
            # Socket trouble is an expected, retryable condition.
            if isinstance(value, tuple):
                socketmsg = value[1]
            else:
                socketmsg = value
            logger.info(
                msg='Retrying upload because of a socket error',
                detail=standard_detail_message(
                    "The socket error's message is '{0}'."
                    .format(socketmsg)))

        # Drop the traceback reference to resolve a possible GC cycle.
        del tb

    blob_location = urlparse(uri)
    upload_options = dict(
        content_settings=ContentSettings(content_type),
        validate_content=True)
    client = BlockBlobService(creds.account_name, creds.account_key,
                              sas_token=creds.access_token,
                              protocol='https')
    client.create_blob_from_bytes(blob_location.netloc,
                                  blob_location.path.lstrip('/'),
                                  fp.read(), **upload_options)

    # Same attribute contract as the S3 version: only 'size'.
    return _Key(size=fp.tell())
def resize_and_convert(image, width=500, height=500, container="media"):
    """Resize *image* to (*width*, *height*), convert it to RGB, and persist
    it as a JPEG.

    When ``settings.DEBUG`` is true the JPEG overwrites the local file at
    ``image.path``; otherwise the encoded bytes are uploaded to Azure blob
    storage in *container* under ``image.name``.

    :param image: an image field/file object exposing ``path`` and ``name``
        and readable by ``PIL.Image.open``.
    :param width: target width in pixels.
    :param height: target height in pixels.
    :param container: Azure storage container to upload into.
    """
    # Image.LANCZOS is the replacement for Image.ANTIALIAS, which was
    # deprecated and removed in Pillow 10; ANTIALIAS was an alias for the
    # same Lanczos filter, so resampling output is unchanged.
    img = Image.open(image)
    img = img.resize((width, height), Image.LANCZOS)
    img = img.convert("RGB")

    if settings.DEBUG:
        # Local development: write straight back over the source file.
        img.save(image.path, format="JPEG")
    else:
        buffer = io.BytesIO()
        img.save(buffer, format="JPEG")
        account_key = os.environ.get('LP_AZURE_STORAGE_KEY', '')
        # NOTE(review): an empty account key (env var unset) yields an
        # unauthenticated client that will fail at upload time — confirm
        # LP_AZURE_STORAGE_KEY is always set in non-DEBUG environments.
        bbs = BlockBlobService(account_name='liveportal2019',
                               account_key=account_key)
        bbs.create_blob_from_bytes(container, image.name, buffer.getvalue())