# Example #1
def generateImageUrl(request):
    """Decode a base64 image payload from *request* and upload it to the
    'image' container in Azure blob storage, returning the blob's public URL.

    The blob name is derived from a SHA-224 hash of the current timestamp,
    making collisions between uploads in different seconds practically
    impossible (two uploads within the same second share a name — TODO
    confirm that is acceptable to callers).
    """
    # SECURITY: credentials are hard-coded in source control. This key is
    # effectively leaked and should be rotated, then loaded from
    # configuration or the environment instead.
    account_name = "faceemoji"
    account_key = "kaoJiy0T7r6sXyo4wFYKCLgpAXbILKvkloeF+kFpCEUxC+bL9BxGA3WtofVxHcLPn3lMjw/UO/0sS1GCN3/AQw=="
    blob_service = BlobService(account_name, account_key)
    # request.data is assumed to be a base64-encoded image body — TODO confirm.
    content = base64.b64decode(request.data)
    st = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
    # Bug fix: hashlib requires bytes on Python 3; hashing the str raised
    # TypeError. Encoding is a no-op for this ASCII timestamp on Python 2.
    blob_name = hashlib.sha224(st.encode('utf-8')).hexdigest() + 'image.png'
    blob_service.put_block_blob_from_bytes('image', blob_name, content)
    img_url = blob_service.make_blob_url('image', blob_name)
    return img_url
def pushToAzureCDN (data):
    """Pickle *data* and upload the bytes to the configured Azure blob.

    Relies on module-level settings: ``azureAccount``, ``azureAccountKey``,
    ``azureContainer`` and ``azureFile`` — presumably defined by the
    importing module; TODO confirm.

    NOTE(review): pickled payloads are only safe to round-trip between
    trusted producers/consumers — never unpickle this blob from an
    untrusted source.
    """
    import pickle
    from azure.storage.blob import BlobService

    blob_service = BlobService(account_name=azureAccount, account_key=azureAccountKey)

    # Bug fix: the original passed 'application/octet-stream' as
    # content_encoding. Content-Encoding describes a transfer encoding
    # (e.g. gzip); the MIME type of the payload belongs in the blob's
    # content-type header.
    blob_service.put_block_blob_from_bytes(
        azureContainer,
        azureFile,
        pickle.dumps(data),
        x_ms_blob_content_type='application/octet-stream'
    )
# Example #3
def save_image_to_azure(profile, url):
    """Fetch the image at *url* and store its bytes in the 'avatars'
    container, using the profile's id as the blob name.

    Best-effort: a ConnectionError during the download is silently ignored;
    a non-2xx response raises via raise_for_status().
    """
    try:
        response = request('GET', url)
        response.raise_for_status()
    except ConnectionError:
        # Network failure — skip the avatar rather than fail the caller.
        return

    service = BlobService(account_name=storagesettings.AZURE_ACCOUNT_NAME,
                          account_key=storagesettings.AZURE_ACCOUNT_KEY)
    service.put_block_blob_from_bytes(
        'avatars',
        profile.id,
        response.content,
        x_ms_blob_content_type=response.headers['content-type'])
# Example #4
def save_image_to_azure(profile, url):
    """Download *url* and upload the response body as the profile's avatar.

    ConnectionError while fetching is swallowed (best-effort); HTTP error
    statuses propagate from raise_for_status().
    """
    try:
        resp = request('GET', url)
        resp.raise_for_status()
    except ConnectionError:
        pass
    else:
        blob_service = BlobService(
            account_name=storagesettings.AZURE_ACCOUNT_NAME,
            account_key=storagesettings.AZURE_ACCOUNT_KEY)
        blob_service.put_block_blob_from_bytes(
            'avatars',
            profile.id,
            resp.content,
            x_ms_blob_content_type=resp.headers['content-type'],
        )
# Example #5
class BlobSaver(Saver):
    """Saver that persists payloads to an Azure blob container, retrying
    transient Azure failures up to BLOB_RETRIES times."""

    def __init__(self, account, key, container, prefix):
        self.block_blob_service = BlobService(account_name=account,
                                              account_key=key)
        self.container = container
        self.prefix = prefix
        self.block_blob_service.create_container(self.container)

    def send_data(self, name, data):
        """Upload *data* as blob ``prefix/name``, retrying on AzureException.

        Raises:
            RuntimeError: once all BLOB_RETRIES attempts have failed,
                carrying the last Azure error.
        """
        last_exc = None
        counter = BLOB_RETRIES
        while counter:
            try:
                self.block_blob_service.put_block_blob_from_bytes(
                    self.container, os.path.join(self.prefix, name), data)
            except AzureException as azure_exc:
                # Bug fix: Python 3 unbinds the `except ... as` name when the
                # handler exits, so the original raise below referenced an
                # undefined variable (NameError). Keep the last error here.
                last_exc = azure_exc
                counter -= 1
            else:
                return
        # Bug fix: the original message had no %s placeholder, so the %
        # operation itself raised TypeError instead of this RuntimeError.
        raise RuntimeError("Couldn't send to blob: %s" % (last_exc,))
# Example #6
# NOTE(review): this snippet is Python 2 (`urllib2`, `print` statement) and
# depends on names not defined here (DB_DRIVER, SERVER_NAME, DATABASE_NAME,
# USERID, PASSWORD, pyodbc) — presumably set earlier in the notebook/script;
# verify before running.
# Specify the Azure Storage Account name where you will have a private blob to copy in the CSV file
STORAGEACCOUNTNAME = "ENTER AZURE STORAGE ACCOUNT NAME"
# Specify the storage account key.
# You can retrieve it from the "Primary Access Key" found on the Azure portal Storage account blade by clicking on the "Key" icon.
# More info: https://azure.microsoft.com/en-us/documentation/articles/storage-create-storage-account/#manage-your-storage-access-keys
STORAGEKEY = "ENTER STORAGE ACCOUNT KEY "
# Read dataset.
# The dataset is read from a public blob and copied to a private blob so it
# can be loaded into SQL DW via PolyBase.

f = urllib2.urlopen('https://cahandson.blob.core.windows.net/nyctaxi/nyctaxipoint1pct.csv')
taxisample = f.read()
# Copy the downloaded CSV into the private 'nyctaxinb' container.
blob_service = BlobService(account_name=STORAGEACCOUNTNAME, account_key=STORAGEKEY)
blob_service.create_container('nyctaxinb')
blob_service.put_block_blob_from_bytes(
    'nyctaxinb',
    'nyctaxipoint1pct.csv',
    taxisample
)

# Construct the SQL DW Connection string
driver = 'DRIVER={' + DB_DRIVER + '}'
server = 'SERVER=' + SERVER_NAME
database = 'DATABASE=' + DATABASE_NAME
uid = 'UID=' + USERID
pwd = 'PWD=' + PASSWORD
CONNECTION_STRING = ';'.join([driver,server,database,uid,pwd, 'Encrypt=yes;TrustServerCertificate=no'])
# SECURITY: this prints the database password in clear text to stdout/logs.
print CONNECTION_STRING

# Connect to the Database. Autocommit needs to be turned on for DDL statements
conn = pyodbc.connect(CONNECTION_STRING)
conn.autocommit=True