Example 1
import os
from socket import gethostname
from tempfile import mkstemp
from zipfile import ZIP_DEFLATED, ZipFile

from azure.storage.blob import BlobService


class _BlobStorageFileHandler(object):
    def __init__(self,
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 container='logs',
                 zip_compression=False,
                 max_connections=1,
                 max_retries=5,
                 retry_wait=1.0):
        self.service = BlobService(account_name, account_key, protocol)
        self.container_created = False
        hostname = gethostname()
        self.meta = {
            'hostname': hostname.replace('_', '-'),
            'process': os.getpid()
        }
        self.container = (container % self.meta).lower()
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.put_block_blob_from_path(
                self.container,
                fileName + suffix,
                file_path,
                x_ms_blob_content_type=content_type,
                max_connections=self.max_connections,
                max_retries=self.max_retries,
                retry_wait=self.retry_wait)
        finally:
            if self.zip_compression and fd is not None:
                os.remove(tmpfile_path)
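The container name is %-interpolated against the metadata dict (hostname and process id), so per-host containers are possible. A minimal usage sketch, assuming placeholder credentials and a hypothetical log path (none of these names come from the source):

# Hypothetical usage sketch; account, container pattern, and paths are placeholders.
handler = _BlobStorageFileHandler(account_name='<account_name>',
                                  account_key='<account_key>',
                                  container='logs-%(hostname)s',
                                  zip_compression=True)
# Ship a rotated-out log file to blob storage as a zipped block blob.
handler.put_file_into_storage('/var/log/myapp', 'app.log.2016-01-01')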
Example 2
def upload_file_to_azure(in_file, file_name, container_name=settings.AZURE_CONTAINER):
    try:
        blob_service = BlobService(AZURE_ACCOUNT_NAME, AZURE_ACCOUNT_KEY)
        blob_service.put_block_blob_from_path(
            container_name=container_name,
            blob_name=file_name,
            file_path=in_file,
            x_ms_blob_content_type='application/octet-stream'
        )
    except Exception as ex:
        print("Failed to upload blob: {0}".format(ex))
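A hypothetical call site (paths are placeholders; AZURE_ACCOUNT_NAME, AZURE_ACCOUNT_KEY, and settings.AZURE_CONTAINER are assumed to be defined at module level, as the function implies):

# Hypothetical usage; file paths and the container override are placeholders.
upload_file_to_azure('/tmp/report.pdf', 'reports/report.pdf')
upload_file_to_azure('/tmp/report.pdf', 'reports/report.pdf',
                     container_name='archive')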
Example 3
class _BlobStorageFileHandler(object):

    def __init__(self,
                  account_name=None,
                  account_key=None,
                  protocol='https',
                  container='logs',
                  zip_compression=False,
                  max_connections=1,
                  max_retries=5,
                  retry_wait=1.0):
        self.service = BlobService(account_name, account_key, protocol)
        self.container_created = False
        hostname = gethostname()
        self.meta = {'hostname': hostname.replace('_', '-'),
                     'process': os.getpid()}
        self.container = (container % self.meta).lower()
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.put_block_blob_from_path(self.container,
                                                  fileName + suffix,
                                                  file_path,
                                                  x_ms_blob_content_type=content_type,
                                                  max_connections=self.max_connections,
                                                  max_retries=self.max_retries,
                                                  retry_wait=self.retry_wait)
        finally:
            if self.zip_compression and fd is not None:
                os.remove(tmpfile_path)
Example 4
def main():
    # continue here

    if verbose:
        print('Copying')
        print('Storage account:', storage_account_name)
        print('Storage Key:', storage_account_key)
        print('Container Name:', storage_container_name)
        print('Input file:', inputfile)
        print('Output Blob:', outputblob)
    blob_service = BlobService(account_name=storage_account_name,
                               account_key=storage_account_key)
    # blob_service.create_container(storage_container_name, x_ms_blob_public_access='container')
    blob_service.put_block_blob_from_path(storage_container_name,
                                          outputblob,
                                          inputfile,
                                          x_ms_blob_content_type="image/jpeg")
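main() relies on module-level variables (verbose, storage_account_name, storage_account_key, storage_container_name, inputfile, outputblob) that the snippet does not define. A hypothetical preamble that could populate them (the flag names are invented for illustration, not part of the source):

# Hypothetical argument parsing; not part of the source.
import argparse

parser = argparse.ArgumentParser(description='Copy a local file to blob storage')
parser.add_argument('--account', required=True)
parser.add_argument('--key', required=True)
parser.add_argument('--container', required=True)
parser.add_argument('--infile', required=True)
parser.add_argument('--outblob', required=True)
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()

verbose = args.verbose
storage_account_name = args.account
storage_account_key = args.key
storage_container_name = args.container
inputfile = args.infile
outputblob = args.outblob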
Example 5
def upload_results():
    """
    :return: None
    """
    logger = logging.getLogger(__name__)
    results_fpath = '/data/wsdm_cup/results/results.tsv'
    logger.info('Uploading results from {0}'.format(results_fpath))
    blob_service = BlobService(account_name='wsdmcupchallenge',
                               sas_token=Config.SAS_TOKEN)
    blob_service.put_block_blob_from_path(container_name='bletchleypark',
                                          blob_name='results.tsv',
                                          file_path=results_fpath)
    logger.info('Done uploading')
    return
Example 6
def upload_results():
    """
    :return: None
    """
    logger = logging.getLogger(__name__)
    results_fpath = '/data/wsdm_cup/results/results.tsv'
    logger.info('Uploading results from {0}'.format(results_fpath))
    blob_service = BlobService(account_name='wsdmcupchallenge',
                               sas_token=Config.SAS_TOKEN)
    blob_service.put_block_blob_from_path(container_name='bletchleypark',
                                          blob_name='results.tsv',
                                          file_path=results_fpath)
    logger.info('Done uploading')
    return
Example 7
def main():
    #continue here

    if verbose:
        print('Copying')
        print('Storage account:', storage_account_name)
        print('Storage Key:', storage_account_key)
        print('Container Name:', storage_container_name)
        print('Input file:', inputfile)
        print('Output Blob:', outputblob)
    blob_service = BlobService(account_name=storage_account_name,
                               account_key=storage_account_key)
    # blob_service.create_container(storage_container_name, x_ms_blob_public_access='container')
    blob_service.put_block_blob_from_path(storage_container_name,
                                          outputblob,
                                          inputfile,
                                          x_ms_blob_content_type="image/jpeg")
Example 8
def main():
    #continue here

    if verbose:
        print('Copying')
        print('Storage account:', storage_account_name)
        print('Storage Key:', storage_account_key)
        print('Container Name:', storage_container_name)
        print('Input file:', inputfile)
        print('Output Blob:', outputblob)
    blob_service = BlobService(account_name=storage_account_name,
                               account_key=storage_account_key)
    # blob_service.create_container(storage_container_name, x_ms_blob_public_access='container')
    blob_service.put_block_blob_from_path(storage_container_name,
                                          outputblob,
                                          inputfile,
                                          x_ms_blob_content_type="image/jpeg")
    # This access policy is valid for four minutes (now - 120 seconds until
    # now + 120 seconds) to account for clock skew.
    ap = AccessPolicy(
        start=(datetime.datetime.utcnow() + datetime.timedelta(seconds=-120)).strftime('%Y-%m-%dT%H:%M:%SZ'),
        expiry=(datetime.datetime.utcnow() + datetime.timedelta(seconds=120)).strftime('%Y-%m-%dT%H:%M:%SZ'),
        permission=BlobSharedAccessPermissions.READ,
    )
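The snippet stops after building the AccessPolicy. One legacy-SDK pattern for actually using it is to wrap it in a SharedAccessPolicy and request a SAS token for the blob; the following is a sketch under that assumption, not a step shown in the source:

# Sketch (assumption): turn the access policy above into a SAS token and a signed URL.
from azure.storage import SharedAccessPolicy

sas_token = blob_service.generate_shared_access_signature(
    storage_container_name, outputblob, SharedAccessPolicy(ap))
signed_url = blob_service.make_blob_url(storage_container_name, outputblob,
                                        sas_token=sas_token)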
Example 9
                theLog.flush()
        except AzureMissingResourceHttpError:
            pass

        # Try to put the blob out in the wild, provide MD5 for error
        # checking since M$ didn't feel the need to implement a return
        # code for this function

        # On further testing, the "content_md5" is only for header rather
        # than the actual blob content - have to wait for these APIs to mature
        try:
            theLog.write("Writing data to Blob {3} to {0}:{1}/{2}\n".format(azureAccount, ingestContainer, filename, stagingDir+"/"+filename))

            azureStorage.put_block_blob_from_path(ingestContainer,
                                                  filename,
                                                  stagingDir+"/"+filename,
                                                  #content_md5=md5Checksum.encode('base64').strip(),
                                                  max_connections=5)
            theLog.write("Wrote data to Blob\n")
            sleep(5)

            if not isClaims:
                theLog.write("Writing md5 to Blob {3} to {0}:{1}/{2}\n".format(azureAccount, 
                                                                               ingestContainer, 
                                                                               filename.split(".")[0] + ".md5", 
                                                                               md5FullFilePath)) 

                azureStorage.put_block_blob_from_path(ingestContainer,
                                                      filename.split(".")[0] + ".md5",
                                                      md5FullFilePath,
                                                      #content_md5=md5Checksum.encode('base64').strip(),
                                                      max_connections=5)
Example 10
with open('proxies.json') as proxies_file:
	proxy_data = json.load(proxies_file)
	proxy = proxy_data['uk']
sources = [
	animesources.Crunchyroll(titlemap, multiseason, 'uk', proxy), 
	animesources.FunimationNow(titlemap, multiseason, 'gb', proxy),
	animesources.Netflix(titlemap, multiseason, 'uk', proxy), 
	animesources.Daisuki(titlemap, multiseason, 'uk', proxy), 
	animesources.Viewster(titlemap, multiseason, 'uk', proxy),
	animesources.Animax(titlemap, multiseason, 'uk', proxy)]
for source in sources:
	source.UpdateShowList(shows)
	print(source.GetName() + ': ' + str(len(shows)))
with open('alternates.json') as alternates_file:
	alternates = json.load(alternates_file)
for alternate in alternates:
	# Use None as the sentinel: a falsy default would wrongly skip a match at index 0.
	match_index = next((i for i, x in enumerate(shows)
	                    if animesources.compare(x['name'], alternate)), None)
	if match_index is not None:
		shows[match_index]['alt'] = alternates[alternate]
shows = sorted(shows, key=lambda show: show['name'].lower())
blob = {"lastUpdated": datetime.utcnow().isoformat(), "shows": shows}
with open('uk.json', 'w') as out_file:
	json.dump(blob, out_file)
azure_blob.put_block_blob_from_path(
	'assets',
	'uk.json',
	'uk.json',
	x_ms_blob_content_type='application/json'
)
print('done')
Example 11
from azure.storage.blob import BlobService


blob_service = BlobService(account_name="<account_name>", account_key="<account_key>")

blob_service.create_container("datacontainer")

blob_service.create_container("datacontainer", x_ms_blob_public_access="container")

blob_service.set_container_acl("datacontainer", x_ms_blob_public_access="container")


blob_service.put_block_blob_from_path(
    "datacontainer", "datablob", "StorageClientPy.py", x_ms_blob_content_type="text/x-script.python"
)


blobs = []
marker = None
while True:
    batch = blob_service.list_blobs("datacontainer", marker=marker)
    blobs.extend(batch)
    if not batch.next_marker:
        break
    marker = batch.next_marker
for blob in blobs:
    print(blob.name)


blob_service.get_blob_to_path("datacontainer", "datablob", "out-StorageClientPy.py")
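The listing loop above pages through results with the continuation marker; the same pattern recurs throughout this section, so it can be factored into a generator. A sketch built only on the list_blobs/next_marker calls used here:

# Sketch: wrap marker-based pagination in a generator.
def iter_blobs(service, container_name, prefix=None):
    marker = None
    while True:
        batch = service.list_blobs(container_name, prefix=prefix, marker=marker)
        for blob in batch:
            yield blob
        marker = batch.next_marker
        if not marker:
            break

for blob in iter_blobs(blob_service, "datacontainer"):
    print(blob.name)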
Example 12
class AzureIOStore(IOStore):
    """
    A class that lets you get input from and send output to Azure Storage.
    
    """
    
    def __init__(self, account_name, container_name, name_prefix=""):
        """
        Make a new AzureIOStore that reads from and writes to the given
        container in the given account, adding the given prefix to keys. All
        paths will be interpreted as keys or key prefixes.
        
        If the name prefix does not end with a trailing slash, and is not empty,
        one will be added automatically.
        
        Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
        variable or from the ~/.toilAzureCredentials file, as in Toil itself.
        
        """
        
        # Make sure azure libraries actually loaded
        assert(have_azure)
        
        self.account_name = account_name
        self.container_name = container_name
        self.name_prefix = name_prefix
        
        if self.name_prefix != "" and not self.name_prefix.endswith("/"):
            # Make sure it has the trailing slash required.
            self.name_prefix += "/"
        
        # Sneak into Toil and use the same keys it uses
        self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
            self.account_name)
            
        # This will hold our Azure blob store connection
        self.connection = None
        
    def __getstate__(self):
        """
        Return the state to use for pickling. We don't want to try and pickle
        an open Azure connection.
        """
     
        return (self.account_name, self.account_key, self.container_name, 
            self.name_prefix)
        
    def __setstate__(self, state):
        """
        Set up after unpickling.
        """
        
        self.account_name = state[0]
        self.account_key = state[1]
        self.container_name = state[2]
        self.name_prefix = state[3]
        
        self.connection = None
        
    def __connect(self):
        """
        Make sure we have an Azure connection, and set one up if we don't.
        """
        
        if self.connection is None:
            RealTimeLogger.get().debug("Connecting to account {}, using "
                "container {} and prefix {}".format(self.account_name,
                self.container_name, self.name_prefix))
        
            # Connect to the blob service where we keep everything
            self.connection = BlobService(
                account_name=self.account_name, account_key=self.account_key)
            
    @backoff        
    def read_input_file(self, input_path, local_path):
        """
        Get input from Azure.
        """
        
        self.__connect()
        
        
        RealTimeLogger.get().debug("Loading {} from AzureIOStore".format(
            input_path))
        
        # Download the blob. This is known to be synchronous, although it can
        # call a callback during the process.
        self.connection.get_blob_to_path(self.container_name,
            self.name_prefix + input_path, local_path)
            
    def list_input_directory(self, input_path, recursive=False,
        with_times=False):
        """
        Loop over fake /-delimited directories on Azure. The prefix may or may
        not have a trailing slash; if not, one will be added automatically.
        
        Returns the names of files and fake directories in the given input fake
        directory, non-recursively.
        
        If with_times is specified, will yield (name, time) pairs including
        modification times as datetime objects. Times on directories are None.
        
        """
        
        self.__connect()
        
        RealTimeLogger.get().info("Enumerating {} from AzureIOStore".format(
            input_path))
        
        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path
        
        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"
        
        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None
        
        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()
        
        while True:
        
            # Get the results from Azure. We don't use delimiter since Azure
            # doesn't seem to provide the placeholder entries it's supposed to.
            # Pass the prefix so we only see blobs under the fake directory
            # (the original listed everything and never dropped the prefix).

            result = self.connection.list_blobs(self.container_name,
                prefix=fake_directory, marker=marker)

            RealTimeLogger.get().info("Found {} files".format(len(result)))

            for blob in result:
                # Yield each result's blob name, but directory names only once

                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]
                
                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)
                    
                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)
                        
                        if with_times:
                            yield subdirectory, None
                        else:
                            yield subdirectory
                else:
                    # We found an actual file 
                    if with_times:
                        mtime = dateutil.parser.parse(
                            blob.properties.last_modified).replace(
                            tzinfo=dateutil.tz.tzutc())
                        yield relative_path, mtime
                            
                    else:
                        yield relative_path
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break
                
    @backoff
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """
        
        self.__connect()
        
        RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
            output_path))
        
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        
        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        self.connection.put_block_blob_from_path(self.container_name,
            self.name_prefix + output_path, local_path)
    
    @backoff        
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            try:
                # Make the container
                self.connection.create_container(self.container_name)
            except azure.WindowsAzureConflictError:
                # The container probably already exists
                pass
            
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return True
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return False
        
        
    @backoff        
    def get_mtime(self, path):
        """
        Returns the modification time of the given blob if it exists, or None
        otherwise.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return dateutil.parser.parse(
                        blob.properties.last_modified).replace(
                        tzinfo=dateutil.tz.tzutc())
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return None
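The @backoff decorator applied to the I/O methods above is not included in the snippet. A minimal stand-in showing what such a retry decorator might look like (an assumption; the source's real implementation is not shown):

# Hypothetical @backoff stand-in: retry with exponential backoff plus jitter.
import functools
import random
import time

def backoff(func, max_tries=5, base_delay=1.0):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attempt in range(max_tries):
            try:
                return func(*args, **kwargs)
            except Exception:
                if attempt == max_tries - 1:
                    raise
                # Sleep base_delay * 2^attempt seconds, plus up to 1 s of jitter.
                time.sleep(base_delay * (2 ** attempt) + random.random())
    return wrapper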
Example 13
from azure.storage.blob import BlobService
import subprocess 
import uuid

print("debug")


blob_service = BlobService(account_name='{PUT YOUR ACCOUNT NAME HERE}', account_key='{PUT YOUR ACCOUNT KEY HERE}')

blob_service.create_container('mycontainer', x_ms_blob_public_access='container')
while True:
	subprocess.call(['./campic.sh'])

	image = "/home/pi/webcam/image.jpg"

	blob_service.put_block_blob_from_path(
		'mycontainer',
		str(uuid.uuid1()),  # blob names must be strings
		image,
		x_ms_blob_content_type='image/jpeg'
	)

Example 14
# upload a file to Azure Blob Storage

from azure.storage.blob import BlobService

# Add your details here
blob_service = BlobService(account_name="", account_key="")
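If the container may not exist yet, it can be created idempotently first; fail_on_exist=False is used the same way later in this section (a small sketch):

# Optional guard: create the container if missing, ignore the call if it exists.
blob_service.create_container("container", fail_on_exist=False)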

blob_service.put_block_blob_from_path("container", "remote-name.jpg",
                                      "localfile.jpg")
Example 15
class Azure(object):
    '''
    A class used to connect to the Azure storage and
    upload/download files using blob storage
    '''
    def __init__(self, params=None):
        '''
        Constructor for the Azure object
        @param params - optional dict with "user" and "key" entries
        '''
        params = params if params is not None else {}  # avoid a mutable default argument
        self.user = params.get("user")
        self.key = params.get("key")

    def connect(self, host, port, user, password, secure):
        '''
        Connect to the Azure service with the given user and key
        @param user - username to use to connect
        @param password - account key to use to connect
        '''
        kwargs = {}
        err = None
        if host is not None:
            kwargs["host_base"] = "." + host
        if user is not None:
            kwargs["account_name"] = user
        elif self.user is not None:
            kwargs["account_name"] = self.user
        if password is not None:
            kwargs["account_key"] = password
        elif self.key is not None:
            kwargs["account_key"] = self.key
        kwargs["protocol"] = "https" if secure else "http"
        try:
            self.service = BlobService(**kwargs)
        except Exception as e:
            err = e.message
            self.service = None
        if self.service is None:
            raise OsakaException("Failed to connect to Azure:" +
                                 ("" if err is None else err))

    @classmethod
    def getSchemes(clazz):
        '''
        Returns a list of schemes this handler handles
        Note: handling the scheme of another handler produces unknown results
        @returns list of handled schemes
        '''
        return ["azure", "azures"]

    def close(self):
        '''
        Close this service
        '''
        pass

    def put(self, path, url):
        '''
        Put a file up to the cloud
        @param path - path to upload
        @param url - path in cloud to upload to
        '''
        if os.path.isdir(path):
            return walk(self.put, path, url)
        cont, blob = get_container_and_path(urlparse.urlparse(url).path)
        self.service.create_container(cont)
        self.service.put_block_blob_from_path(cont, blob, path)
        return True

    def get(self, url, dest):
        '''
        Get file(s) from the cloud
        @param url - url on cloud to pull down
        @param dest - destination to download to
        '''
        cont, blob = get_container_and_path(urlparse.urlparse(url).path)
        for b in self.service.list_blobs(cont, prefix=blob):
            destination = os.path.join(dest, os.path.relpath(
                b.name, blob)) if blob != b.name else dest
            if not os.path.exists(os.path.dirname(destination)):
                os.makedirs(os.path.dirname(destination))
            self.service.get_blob_to_path(cont, b.name, destination)
        return True

    def rm(self, url):
        '''
        Remove this url and all children urls
        @param url - url to remove
        '''
        cont, blob = get_container_and_path(urlparse.urlparse(url).path)
        for b in self.service.list_blobs(cont, prefix=blob):
            self.service.delete_blob(cont, b.name)
        return True
Example 16
from azure.storage.blob import BlobService
import os

blob_service = BlobService(account_name='songanalysis',
                           account_key=os.environ['AZUREKEY'])
blob_service.put_block_blob_from_path(
    'songs',
    '10_happy',  # number before the song emotion corresponds to 'sentValue' from songdictionary.py. 10 = 1.0, 08 = 0.8, etc.
    '10_happy.mp3',
    x_ms_blob_content_type='audio/mpeg')  # 'mp3' is not a valid MIME type
Example 17
        azureStorage.delete_blob(ingestContainer, targetIngestFullPath)
        theLog.write("Existing ingest blob found, deleting it\n\n")
        theLog.flush()
    except AzureMissingResourceHttpError:
        pass

    # Try to put the blob out in the wild, provide MD5 for error
    # checking since M$ didn't feel the need to implement a return
    # code for this function

    # On further testing, the "content_md5" is only for header rather
    # than the actual blob content - have to wait for these APIs to mature
    try:
        azureStorage.put_block_blob_from_path(ingestContainer,
                                              targetIngestFullPath,
                                              fullFilePath,
                                              #content_md5=md5Checksum.encode('base64').strip(),
                                              max_connections=5)
        theLog.write("Uploaded blob to ingest container : {0}\n".format(ingestContainer))
        theLog.flush()
    except AzureHttpError as e:
        result = "Ingest-Failed:" + e.message.split(".")[0]
        theLog.write("Upload exception: {0}\n\n".format(result))
        theLog.flush()


    # Create a list of queries for Hive
    hiveQueries = []

    sortedByString = "SORTED BY(GenPatientID)"
    if dataSetType == "Clients" or dataSetType == "Providers":
Example 18
with open('proxies.json') as proxies_file:
    proxy_data = json.load(proxies_file)
    proxy = proxy_data['au']
sources = [
    animesources.Crunchyroll(titlemap, multiseason, 'au', proxy),
    animesources.Netflix(titlemap, multiseason, 'au', proxy),
    animesources.Daisuki(titlemap, multiseason, 'au', proxy),
    animesources.Viewster(titlemap, multiseason, 'au', proxy),
    animesources.AnimeLab(titlemap, multiseason, 'au', proxy),
    animesources.Hanabee(titlemap, multiseason, 'au', proxy)
]
for source in sources:
    source.UpdateShowList(shows)
    print(source.GetName() + ': ' + str(len(shows)))
with open('alternates.json') as alternates_file:
    alternates = json.load(alternates_file)
for alternate in alternates:
    # Use None as the sentinel: a falsy default would wrongly skip a match at index 0.
    match_index = next((i for i, x in enumerate(shows)
                        if animesources.compare(x['name'], alternate)), None)
    if match_index is not None:
        shows[match_index]['alt'] = alternates[alternate]
shows = sorted(shows, key=lambda show: show['name'].lower())
blob = {"lastUpdated": datetime.utcnow().isoformat(), "shows": shows}
with open('au.json', 'w') as out_file:
    json.dump(blob, out_file)
azure_blob.put_block_blob_from_path('assets',
                                    'au.json',
                                    'au.json',
                                    x_ms_blob_content_type='application/json')
print('done')
Example 19
from azure.storage.blob import BlobService
import os

blob_service = BlobService(account_name='newsfeels', account_key=os.environ['AZUREKEY'])
blob_service.put_block_blob_from_path(
    'songs',
    '-02_diamonds',       # number before the song emotion corresponds to 'sentValue' from songdictionary.py. 10 = 1.0, 08 = 0.8, etc.
    '-02_diamonds.mp3',
    x_ms_blob_content_type='audio/mpeg'  # 'mp3' is not a valid MIME type
)
Example 20
# The Storage Account Name
storage_account_name = 'larryfrstore'
storage_account_key = '<storage_account_key>'  # key redacted; never publish real account keys
storage_container_name = 'mycontainer'
example_file_path = '..\\sampledata\\sample.log'

# Create the blob service, using the name and key for your Azure Storage account
blob_service = BlobService(storage_account_name, storage_account_key)

# Create the container, if it does not already exist
blob_service.create_container(storage_container_name)

# Upload an example file to the container
blob_service.put_block_blob_from_path(
    storage_container_name,
    'sample.log',
    example_file_path,
)

# Create a new signed identifier (policy)
si = SignedIdentifier()
# Set the name
si.id = policy_name
# Set the expiration date
si.access_policy.expiry = '2016-01-01'
# Set the permissions. Read and List in this example
si.access_policy.permission = ContainerSharedAccessPermissions.READ + ContainerSharedAccessPermissions.LIST

# Get the existing signed identifiers (policies) for the container
identifiers = blob_service.get_container_acl(storage_container_name)
# And append the new one to the list
Example 21
class AzureConnector():

    def __init__(self, config):

        tree = ET.parse('SharedConfig.xml')
        self.myMachineName = tree.find('.//Instance').get("id")

        self.sms = ServiceManagementService(
            subscription_id=config.get("azure", "subscription_id"),
            cert_file=config.get("azure", "cert_file")
        );

        self.bus_service = ServiceBusService(
            service_namespace=config.get("azure", "bus_namespace"),
            shared_access_key_name=config.get("azure", "bus_shared_access_key_name"),
            shared_access_key_value=config.get("azure", "bus_shared_access_key_value"))

        self.command_queue = config.get("azure", "commandQueuePath")
        for tries in range(1,10):
            try:
                self.bus_service.create_queue(self.command_queue)
                break
            except:
                print "Waiting..."
            
        self.status_topic = config.get("azure", "statusTopicPath")
        self.bus_service.create_topic(self.status_topic)  # a topic, not a queue

        self.storage = BlobService(account_name=config.get("azure", "account_name"),
                                   account_key=config.get("azure", "account_key"))

        self.algo_storage_name = config.get("azure", "algorithm_storage_name")
        self.storage.create_container(self.algo_storage_name, fail_on_exist=False)

        self.proj_storage_name = config.get("azure", "project_storage_name")
        self.storage.create_container(self.proj_storage_name, fail_on_exist=False)

    def check_new_tasks(self):

        for tries in range(1,2):
            try:
                message = self.bus_service.receive_queue_message(self.command_queue, peek_lock=False, timeout=60)
                break
            except:
                message = None

        if message is None or message.body is None:
            return None

        job_description = json.loads(message.body.replace('/AzureBlobStorage/', ''))

        command = CommandMetadata(
            command_id = job_description["command_id"],
            algorithm_directory = job_description["algorithm_prfx"],
            project_prfx = job_description["project_prfx"],
            project_input_files = job_description["project_input_files"],
            algorithm_executable_name = job_description["algorithm_executable_name"],
            algorithm_parameters = job_description["algorithm_parameters"],
            sent_timestamp = datetime.datetime.strptime(job_description["sent_timestamp"], "%d/%m/%Y %H:%M:%S"),
            machine_size=job_description["machine_size"])

        # Return data about the command consumed from the queue
        return command

    def list_algo_files(self, prfx):

        blobs = self.storage.list_blobs(container_name=self.algo_storage_name, prefix=prfx)
        return [blob.name for blob in blobs]  # avoid shadowing the built-in 'list'

    def download_algo_zip(self, algorithm_bin_file, tmp_file):
        print "download_algo_zip(algorithm_bin_file="+algorithm_bin_file+", tmp_file="+tmp_file+")"
        for tries in range(1, 6):  # let tries reach 5 so the abort branch below can fire
            try:
                self.storage.get_blob_to_path(self.algo_storage_name, algorithm_bin_file, tmp_file,
                                 open_mode='wb', snapshot=None, x_ms_lease_id=None,
                                 progress_callback=None)
                break

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())

    def download_file_to_project(self, project_name, blob_name, dir):
        print "download_file_to_project(project_name="+project_name+", blob_name="+blob_name+", dir="+dir+")"
        for tries in range(1, 6):
            try:
                self.storage.get_blob_to_path(self.proj_storage_name,
                                              os.path.join(project_name,blob_name),
                                              os.path.join(dir,os.path.join(project_name,blob_name)),
                                              open_mode='wb', snapshot=None, x_ms_lease_id=None,
                                              progress_callback=None)
                break

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())

    def upload_proj_file(self, project_name, blob_name, dir):
        print "upload_proj_file(project_name="+project_name+", blob_name="+blob_name+", dir="+dir+")"
        if blob_name[0] == '/':
            blob_name = blob_name[1:]
        for tries in range(1, 6):
            try:
                self.storage.put_block_blob_from_path(self.proj_storage_name,
                                              os.path.join(project_name,blob_name),
                                              os.path.join(dir,os.path.join(project_name,blob_name)))
                break

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())

    def download_file_to_algo(self, blob_name, dir):
        print "download_file_to_algo(blob_name="+blob_name+", dir="+dir+")"

        for tries in range(1, 6):
            try:
                self.storage.get_blob_to_path(container_name=self.algo_storage_name,
                                              blob_name=os.path.join(blob_name),
                                              file_path=os.path.join(dir,blob_name),
                                              open_mode='wb', snapshot=None, x_ms_lease_id=None,
                                              progress_callback=None)
                break

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())


    def send_status(self, main_status):
        for tries in range(1, 6):
            try:
                self.bus_service.send_topic_message(topic_name=self.status_topic,
                                                    message=Message(main_status.encode('utf-8')))
                break

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())

    def shutdown_myself(self):

        # This virtual machine will delete itself.
        print("Removing virtual machine from the cloud...")
        for tries in range(1, 6):
            try:
                self.sms.delete_deployment(
                    service_name=self.myMachineName,
                    deployment_name=self.myMachineName, delete_vhd=True)
                exit(0)

            except Exception as e:

                if tries == 5:
                    print("Too many connection errors. Operation aborted.")
                else:
                    print("Service connection error. Retrying... " + e.__str__())
Example 22
class AzureIOStore(IOStore):
    """
    A class that lets you get input from and send output to Azure Storage.
    
    """
    def __init__(self, account_name, container_name, name_prefix=""):
        """
        Make a new AzureIOStore that reads from and writes to the given
        container in the given account, adding the given prefix to keys. All
        paths will be interpreted as keys or key prefixes.
        
        If the name prefix does not end with a trailing slash, and is not empty,
        one will be added automatically.
        
        Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
        variable or from the ~/.toilAzureCredentials file, as in Toil itself.
        
        """

        # Make sure azure libraries actually loaded
        assert (have_azure)

        self.account_name = account_name
        self.container_name = container_name
        self.name_prefix = name_prefix

        if self.name_prefix != "" and not self.name_prefix.endswith("/"):
            # Make sure it has the trailing slash required.
            self.name_prefix += "/"

        # Sneak into Toil and use the same keys it uses
        self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
            self.account_name)

        # This will hold our Azure blob store connection
        self.connection = None

    def __getstate__(self):
        """
        Return the state to use for pickling. We don't want to try and pickle
        an open Azure connection.
        """

        return (self.account_name, self.account_key, self.container_name,
                self.name_prefix)

    def __setstate__(self, state):
        """
        Set up after unpickling.
        """

        self.account_name = state[0]
        self.account_key = state[1]
        self.container_name = state[2]
        self.name_prefix = state[3]

        self.connection = None

    def __connect(self):
        """
        Make sure we have an Azure connection, and set one up if we don't.
        """

        if self.connection is None:
            RealtimeLogger.debug("Connecting to account {}, using "
                                 "container {} and prefix {}".format(
                                     self.account_name, self.container_name,
                                     self.name_prefix))

            # Connect to the blob service where we keep everything
            self.connection = BlobService(account_name=self.account_name,
                                          account_key=self.account_key)

    @backoff
    def read_input_file(self, input_path, local_path):
        """
        Get input from Azure.
        """

        self.__connect()

        RealtimeLogger.debug("Loading {} from AzureIOStore".format(input_path))

        # Download the blob. This is known to be synchronous, although it can
        # call a callback during the process.
        self.connection.get_blob_to_path(self.container_name,
                                         self.name_prefix + input_path,
                                         local_path)

    def list_input_directory(self,
                             input_path,
                             recursive=False,
                             with_times=False):
        """
        Loop over fake /-delimited directories on Azure. The prefix may or may
        not have a trailing slash; if not, one will be added automatically.
        
        Returns the names of files and fake directories in the given input fake
        directory, non-recursively.
        
        If with_times is specified, will yield (name, time) pairs including
        modification times as datetime objects. Times on directories are None.
        
        """

        self.__connect()

        RealtimeLogger.info(
            "Enumerating {} from AzureIOStore".format(input_path))

        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path

        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"

        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None

        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()

        while True:

            # Get the results from Azure. We don't use delimiter since Azure
            # doesn't seem to provide the placeholder entries it's supposed to.
            result = self.connection.list_blobs(self.container_name,
                                                prefix=fake_directory,
                                                marker=marker)

            RealtimeLogger.info("Found {} files".format(len(result)))

            for blob in result:
                # Yield each result's blob name, but directory names only once

                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]

                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)

                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)

                        if with_times:
                            yield subdirectory, None
                        else:
                            yield subdirectory
                else:
                    # We found an actual file
                    if with_times:
                        mtime = blob.properties.last_modified

                        if isinstance(mtime, datetime.datetime):
                            # Make sure we're getting proper localized datetimes
                            # from the new Azure Storage API.
                            assert (mtime.tzinfo is not None and
                                    mtime.tzinfo.utcoffset(mtime) is not None)
                        else:
                            # Convert mtime from a string as in the old API.
                            mtime = dateutil.parser.parse(mtime).replace(
                                tzinfo=dateutil.tz.tzutc())

                        yield relative_path, mtime

                    else:
                        yield relative_path

            # Save the marker
            marker = result.next_marker

            if not marker:
                break

    @backoff
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """

        self.__connect()

        RealtimeLogger.debug("Saving {} to AzureIOStore".format(output_path))

        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass

        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        self.connection.put_block_blob_from_path(
            self.container_name, self.name_prefix + output_path, local_path)

    @backoff
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """

        self.__connect()

        marker = None

        while True:

            try:
                # Make the container
                self.connection.create_container(self.container_name)
            except azure.WindowsAzureConflictError:
                # The container probably already exists
                pass

            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name,
                                                prefix=self.name_prefix + path,
                                                marker=marker)

            for blob in result:
                # Look at each blob

                if blob.name == self.name_prefix + path:
                    # Found it
                    return True

            # Save the marker
            marker = result.next_marker

            if not marker:
                break

        return False

    @backoff
    def get_mtime(self, path):
        """
        Returns the modification time of the given blob if it exists, or None
        otherwise.
        
        """

        self.__connect()

        marker = None

        while True:

            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name,
                                                prefix=self.name_prefix + path,
                                                marker=marker)

            for blob in result:
                # Look at each blob

                if blob.name == self.name_prefix + path:
                    # Found it
                    mtime = blob.properties.last_modified

                    if isinstance(mtime, datetime.datetime):
                        # Make sure we're getting proper localized datetimes
                        # from the new Azure Storage API.
                        assert (mtime.tzinfo is not None
                                and mtime.tzinfo.utcoffset(mtime) is not None)
                    else:
                        # Convert mtime from a string as in the old API.
                        mtime = dateutil.parser.parse(mtime).replace(
                            tzinfo=dateutil.tz.tzutc())

                    return mtime

            # Save the marker
            marker = result.next_marker

            if not marker:
                break

        return None

    @backoff
    def get_size(self, path):
        """
        Returns the size in bytes of the given blob if it exists, or None
        otherwise.
        
        """

        self.__connect()

        marker = None

        while True:

            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name,
                                                prefix=self.name_prefix + path,
                                                marker=marker)

            for blob in result:
                # Look at each blob

                if blob.name == self.name_prefix + path:
                    # Found it
                    size = blob.properties.content_length

                    return size

            # Save the marker
            marker = result.next_marker

            if not marker:
                break

        return None
Example 23
from azure.storage.blob import BlobService
from os import listdir
from os.path import isfile, join

# Set parameters here
ACCOUNT_NAME = "felizaaccount"
ACCOUNT_KEY = "<your_account_key>"  # key redacted; replace with your storage account key
CONTAINER_NAME = "felizacontainer"
LOCAL_DIRECT = "/home/pi/Main/DataSet"

blob_service = BlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)
# find all files in the LOCAL_DIRECT (excluding directory)
local_file_list = [
    f for f in listdir(LOCAL_DIRECT) if isfile(join(LOCAL_DIRECT, f))
]

for blob_name in local_file_list:
    local_file = join(LOCAL_DIRECT, blob_name)
    try:
        blob_service.put_block_blob_from_path(CONTAINER_NAME, blob_name,
                                              local_file)
    except Exception as e:
        print "something went wrong when uploading %s: %s" % (blob_name, e)
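If the upload script is re-run, already-uploaded files could be skipped with an existence check built on the list_blobs prefix pattern from the AzureIOStore examples above (a sketch, not part of the source):

# Sketch: report whether a blob with this exact name already exists.
def blob_exists(service, container_name, name):
    return any(b.name == name
               for b in service.list_blobs(container_name, prefix=name))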