Example #1
class FileService(Component):
    def __init__(self):
        self.blob_service = None

    def generate_blob_service(self):
        if self.blob_service is None:
            # if storage info doesn't exist in config.py, the upload-file functions stop working
            self.blob_service = BlobService(account_name=self.util.get_config("storage.azure.account_name"),
                                            account_key=self.util.get_config("storage.azure.account_key"),
                                            host_base=self.util.get_config("storage.azure.blob_service_host_base"))

    def create_container_in_storage(self, container_name, access):
        """
        create a container if doesn't exist
        :param container_name:
        :param access:
        :return:
        """
        self.generate_blob_service()
        try:
            names = [c.name for c in self.blob_service.list_containers()]
            if container_name not in names:
                self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exsit in storage")
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, stream, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, path, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
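A minimal usage sketch for the service above. It assumes (hypothetically) that the Component base class wires up self.util and self.log, and that the three storage.azure.* keys exist in config.py; the container and file names are placeholders:

# Hedged usage sketch -- file_service is assumed to come fully wired from the host framework.
file_service = FileService()
with open("report.csv", "rb") as stream:
    url = file_service.upload_file_to_azure(stream, "reports", "report.csv")
if url:
    print "uploaded to", url  # URL produced by make_blob_url
else:
    print "upload failed; see the error log"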
Example #2
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name):
        BaseTransfer.__init__(self)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")

    def get_metadata_for_key(self, key):
        key = fix_path(key)
        return self.list_path(key)[0]['metadata']

    def list_path(self, path):
        return_list = []
        path = fix_path(path)
        self.log.info("Asking for listing of: %r", path)
        for r in self.conn.list_blobs(self.container_name, prefix=path, delimiter="/",
                                      include="metadata"):
            entry = {"name": r.name, "size": r.properties.content_length,
                     "last_modified": dateutil.parser.parse(r.properties.last_modified),
                     "metadata": r.metadata}
            return_list.append(entry)
        return return_list

    def delete_key(self, key_name):
        key_name = fix_path(key_name)
        self.log.debug("Deleting key: %r", key_name)
        return self.conn.delete_blob(self.container_name, key_name)

    def get_contents_to_file(self, obj_key, filepath_to_store_to):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, obj_key, filepath_to_store_to)

    def get_contents_to_string(self, obj_key):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r", obj_key)
        return self.conn.get_blob_to_bytes(self.container_name, obj_key), self.get_metadata_for_key(obj_key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        # Azure requires all metadata keys and values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None):
        # Azure requires all metadata keys and values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
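A short, hedged driving example for the transfer class above; the account name, key, container, and file paths are placeholders, and BaseTransfer/fix_path are assumed to come from the surrounding module:

# Hypothetical credentials -- substitute real storage account values.
transfer = AzureTransfer(account_name="myaccount",
                         account_key="base64key==",
                         container_name="backups")
transfer.store_file_from_disk("db/dump.sql", "/tmp/dump.sql",
                              metadata={"site": "primary"})
for entry in transfer.list_path("db/"):
    print entry["name"], entry["size"]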
Example #3
def upload_log():

    blob_service = BlobService(account_name=os.getenv('ACC_NAME'),
                               account_key=os.getenv('ACCESS_KEY'))

    fpath = os.path.join(os.getenv('LOGS_DIR'), "log.log")

    blob_service.put_block_blob_from_path('log',
                                          "log.log",
                                          fpath,
                                          x_ms_blob_content_type="text/plain")
Example #4
def upload_log():

    blob_service = BlobService(account_name=os.getenv('ACC_NAME'), account_key=os.getenv('ACCESS_KEY'))

    fpath = os.path.join(os.getenv('LOGS_DIR'), "log.log")

    blob_service.put_block_blob_from_path(
                    'log',
                    "log.log",
                    fpath,
                    x_ms_blob_content_type="text/plain"
                )
Example #5
def upload_all_new_azure(local_folder, azure_container, account_name, account_key):

    blob_service = BlobService(account_name=account_name, account_key=account_key)

    blob_list = blob_service.list_blobs(azure_container)

    blob_name_list = [b.name for b in blob_list.blobs]

    blob_name_set = set(blob_name_list)

    # Now for each file in the local folder, see whether it's already in the Azure container

    localfiles = os.listdir(local_folder)
    localfiles = [f for f in localfiles if "~" not in f]
    localfiles = [f for f in localfiles if f[0] != "."]
    localfiles = [f for f in localfiles if (".zip" in f or ".csv" in f)]

    localfiles = set(localfiles)

    files_to_upload = localfiles - blob_name_set

    orig_len = len(files_to_upload)
    error_counter = 0
    while len(files_to_upload) > 0:
        if error_counter > orig_len:
            logger.error("too many upload failures, exiting")
            sys.exit()
        filename = files_to_upload.pop()

        try:
            blob_service.put_block_blob_from_path(
                azure_container,
                filename,
                os.path.join(local_folder, filename)
            )

        except Exception:
            error_counter += 1
            logging.error(filename + " failed to upload")
            files_to_upload.add(filename)
Example #6
def upload_all_new_azure(local_folder, azure_container, account_name,
                         account_key):

    blob_service = BlobService(account_name=account_name,
                               account_key=account_key)

    blob_list = blob_service.list_blobs(azure_container)

    blob_name_list = [b.name for b in blob_list.blobs]

    blob_name_set = set(blob_name_list)

    # Now for each file in the local folder, see whether it's already in the Azure container

    localfiles = os.listdir(local_folder)
    localfiles = [f for f in localfiles if "~" not in f]
    localfiles = [f for f in localfiles if f[0] != "."]
    localfiles = [f for f in localfiles if (".zip" in f or ".csv" in f)]

    localfiles = set(localfiles)

    files_to_upload = localfiles - blob_name_set

    orig_len = len(files_to_upload)
    error_counter = 0
    while len(files_to_upload) > 0:
        if error_counter > orig_len:
            logger.error("too many upload failures, exiting")
            sys.exit()
        filename = files_to_upload.pop()

        try:
            blob_service.put_block_blob_from_path(
                azure_container, filename, os.path.join(local_folder, filename))

        except Exception:
            error_counter += 1
            logging.error(filename + " failed to upload")
            files_to_upload.add(filename)
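A hedged invocation of the sync function above; the folder and credentials are placeholders. put_block_blob_from_path also accepts an x_ms_blob_content_type keyword (used in other examples on this page) if the uploaded files should carry an explicit MIME type:

# Hypothetical arguments -- substitute a real folder and storage account.
upload_all_new_azure("/data/exports", "csvs",
                     account_name="myaccount", account_key="base64key==")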
Example #7
with open(args.importRules, 'r') as infile:
    images = json.load(infile)

basepath = os.path.dirname(args.importRules)
os.chdir(basepath) 

for image in images:
    idx = 0
    for filename in image['files']:
        # First upload the full-res images
        basename = str(image['pk']) + "-" + str(idx)
        name = basename + ".jpg"
        print "Uploading " + filename + " to " + name
        blob_service.put_block_blob_from_path(
           container_name,
           name,
           filename,
           x_ms_blob_content_type='image/jpg' )

        # Create and upload thumbnails
        size = 256, 256

        im = Image.open(filename)
        im.thumbnail(size, Image.ANTIALIAS)
        im.save("tmp-tn.jpg", "JPEG")
        name = basename + "-tn.jpg"
        blob_service.put_block_blob_from_path(
           container_name,
           name,
           "tmp-tn.jpg",
           x_ms_blob_content_type='image/jpg' )
        idx += 1  # advance the per-file index so each blob gets a distinct name
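The loop above round-trips every thumbnail through a tmp-tn.jpg file on disk. A hedged alternative sketch, reusing the same blob_service and container_name, keeps the thumbnail in memory and uploads it with the stream-based put_block_blob_from_file call shown elsewhere on this page:

import io

buf = io.BytesIO()
im.save(buf, "JPEG")  # render the thumbnail into an in-memory buffer
buf.seek(0)           # rewind so the upload reads from the start
blob_service.put_block_blob_from_file(
   container_name,
   basename + "-tn.jpg",
   buf,
   x_ms_blob_content_type='image/jpg')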
Example #8
def do_step(context):
    settings = context.meta['settings']
    index_file = context.meta['index-file']
    pivnetAPIToken = settings["pivnet-api-token"]

    with open("manifests/{0}".format(index_file)) as f:
        manifests = yaml.safe_load(f)

    eula_urls = [
        "https://network.pivotal.io/api/v2/products/{0}/releases/{1}/eula_acceptance".format(
            m['release-name'],
            m['release-number']) for m in manifests['manifests']]

    release_urls = [
        "https://network.pivotal.io/api/v2/products/{0}/releases/{1}/product_files/{2}/download".format(
            m['release-name'],
            m['release-number'],
            m['file-number']) for m in manifests['manifests']]

    stemcell_urls = [m['stemcell'] for m in manifests['manifests']]

    # accept eula for each product
    for url in eula_urls:
        print url
        if not "concourse" in url:
            res = authorizedPost(url, pivnetAPIToken)
            code = res.getcode()

    # releases
    is_release_file = re.compile(r"^releases/.+")
    if not os.path.exists("/tmp/releases"):
        os.makedirs("/tmp/releases")

    client = bosh_client.BoshClient("https://10.0.0.4:25555", "admin", "admin")
    storage_account_name = settings["STORAGE-ACCOUNT-NAME"]
    storage_access_key = settings["STORAGE-ACCESS-KEY"]

    blob_service = BlobService(storage_account_name, storage_access_key)
    blob_service.create_container(
        container_name='tempreleases',
        x_ms_blob_public_access='container')

    print "Processing releases."
    for url in release_urls:

        print "Downloading {0}.".format(url)

        if "concourse" in url:
            release_url = "https://s3-us-west-2.amazonaws.com/bosh-azure-releases/concourse.zip"
            res = urllib2.urlopen(release_url)
        else:
            res = authorizedPost(url, pivnetAPIToken)

        code = res.getcode()

        length = int(res.headers["Content-Length"])

        # content-length
        if code == 200:

            total = 0
            pcent = 0.0
            CHUNK = 16 * 1024

            with tempfile.TemporaryFile() as temp:
                while True:
                    chunk = res.read(CHUNK)
                    total += len(chunk)  # count the bytes actually read, not the chunk size
                    pcent = (float(total) / float(length)) * 100

                    sys.stdout.write(
                        "Download progress: %.2f%% (%.2fM)\r" %
                        (pcent, total / 1000000.0))
                    sys.stdout.flush()

                    if not chunk:
                        break

                    temp.write(chunk)

                print "Download complete."

                z = zipfile.ZipFile(temp)
                for name in z.namelist():
                    
                    # is this a release?
                    if is_release_file.match(name):

                        release_filename = "/tmp/{0}".format(name)

                        print "Unpacking {0}.".format(name)
                        z.extract(name, "/tmp")

                        print "Uploading {0} to Azure blob store".format(name)

                        blob_service.put_block_blob_from_path(
                            'tempreleases',
                            name,
                            "/tmp/{0}".format(name),
                            x_ms_blob_content_type='application/x-compressed'
                        )

                        os.unlink(release_filename)
                        blob_url = "http://{0}.blob.core.windows.net/{1}/{2}".format(
                            storage_account_name, 'tempreleases', name)

                        print "Uploading release {0} to BOSH director.".format(name)

                        task_id = client.upload_release(blob_url)
                        client.wait_for_task(task_id)

                z.close()
                temp.close()

    blob_service.delete_container("tempreleases")

    # stemcells
    print "Processing stemcells."

    for url in stemcell_urls:
        print "Processing stemcell {0}".format(url)
        task_id = client.upload_stemcell(url)
        client.wait_for_task(task_id)

    return context
Example #9
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                from azure.storage.blob import BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')
        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')
        self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                        account_key=os.environ['AZURE_ACCOUNT_KEY'])

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
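The _list method above illustrates the marker-based pagination the legacy SDK requires: each list_blobs call returns one page, and next_marker is the cursor for the following call. A hedged standalone sketch of the same loop, assuming an already-constructed blob_service:

def list_all_blob_names(blob_service, container):
    # Keep following next_marker until the service returns an empty one.
    names = []
    marker = None
    while True:
        batch = blob_service.list_blobs(container, marker=marker)
        names.extend(blob.name for blob in batch)
        if not batch.next_marker:
            break
        marker = batch.next_marker
    return names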
Example #10
logtime = time.strftime("%Y%m%d-%H%M%S")
logmessage = "uploadmp3.py started"
command = "sed -i '1s/^/" + logtime + " " + logmessage + "\\n/' /home/pi/selcuk/log.txt"
os.system(command)

from azure.storage import BlobService
blob_service = BlobService(account_name='account_name', account_key='account_key')
blob_service.create_container('record')
blob_service.create_container('record', x_ms_blob_public_access='container')
blob_service.set_container_acl('record', x_ms_blob_public_access='container')

directory = "/home/pi/selcuk/mp3"

os.chdir(directory)
for file in glob.glob("*.mp3"):
    full_path = directory + "/" + file
    blob_service.put_block_blob_from_path(
        'record',
        file,
        full_path,
        x_ms_blob_content_type='audio/mpeg3'
    )
    delete_command = "rm " + file
    os.system(delete_command)

    logtime = time.strftime("%Y%m%d-%H%M%S")
    logmessage = file + " uploaded to cloud and deleted from device"
    command = "sed -i '1s/^/" + logtime + " " + logmessage + "\\n/' /home/pi/selcuk/log.txt"
    os.system(command)
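The script above shells out to sed to prepend timestamped lines to log.txt. A hedged sketch of the same bookkeeping with the standard logging module instead (appending rather than prepending, which is the usual convention):

import logging

logging.basicConfig(filename="/home/pi/selcuk/log.txt",
                    format="%(asctime)s %(message)s",
                    datefmt="%Y%m%d-%H%M%S",
                    level=logging.INFO)

logging.info("uploadmp3.py started")
# ...and after each upload:
# logging.info("%s uploaded to cloud and deleted from device", file)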
Example #11
timeStamp = datetime.today().strftime("%Y-%m-%dT%H%M%SZ")

pathToZip = ''
sevenZip = ''
fileName = 'Backup_'+timeStamp+'.7z'
activeContainer = ''

if platform.system() == 'Windows':
    pathToZip = 'C:\\SQLBackups\\' + fileName
    sevenZip = 'C:\\SQLBackups\\7Zip\\7za.exe'
    activeContainer = regVPS
else:
    # sudo apt-get install p7zip-full
    pathToZip = '/var/tmp/'+fileName
    sevenZip = '7z'
    activeContainer = DOVPS

# Create archive.7z containing the Backups directory with max compression
zipArgs = ["a", pathToZip, "Backups", "-mx9"]

subprocess.call([sevenZip] + zipArgs)

blob_service = BlobService('storageAccountName', 'storageKey')

blob_service.create_container(regVPS)
blob_service.create_container(DOVPS)

blob_service.put_block_blob_from_path(activeContainer, fileName, pathToZip)

os.remove(pathToZip)
Example #12
ACCOUNT_NAME = 'sounds'
ACCOUNT_KEY  = AC.getAccountKey() # primary access key
HOST_BASE    = '.blob.core.windows.net'

blob_service = BlobService(account_name=ACCOUNT_NAME,
                           account_key=ACCOUNT_KEY,
                           host_base=HOST_BASE)

CONTAINER = 'bat-detective' # or whatever else you like

created = blob_service.create_container(CONTAINER, x_ms_blob_public_access='container')
print "Created" if created else "Not created (probably already existing)"

audio_dir = '../../data/wav/'
SOUND_FILES = glob.glob(audio_dir + '*.wav')

for f in SOUND_FILES:
    print "uploading", os.path.basename(f)
    blob_service.put_block_blob_from_path(
        CONTAINER,                          # container
        os.path.basename(f),                # blob
        f,                                  # path
        x_ms_blob_content_type='audio/wav'
    )


blobs = blob_service.list_blobs(CONTAINER)

for blob in blobs:
    print(blob.name)
Example #13
        thread = threading.Thread(target=target)
        thread.start()

        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
        print self.process.returncode

#command = Command("echo 'Process started'; sleep 2; echo 'Process finished'")
#print command.run(timeout=3)
#print command.run(timeout=1)
#
#command = Command('ping www.google.com')
#print command.run(timeout=1)

AZURE_STORAGE_CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']

blob_service = BlobService(connection_string=AZURE_STORAGE_CONNECTION_STRING)

print blob_service.put_block_blob_from_path(
    'nexradl2',
    '201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    '/snfs9/q2/levelii_tarfiles/201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    max_connections=5,
)

blobs = blob_service.list_blobs('nexradl2', maxresults=10)
for blob in blobs:
    print(blob.name)
    print(blob.url)

Example #14
# Start looping
while True:
    # Get image path
    img_filename = ''
    img_filepath = 'images/'
    img_filelist = glob.glob(img_filepath + '*.jpg')
    img_filefullpath = get_oldest_file(img_filelist)
    if img_filefullpath is None:
        # There is no image to upload, so sleep
        logging.debug('Image directory is empty.')
        time.sleep(1)
    else:
        img_filename = os.path.basename(img_filefullpath)
        # Upload the oldest image
        blob_service.put_block_blob_from_path(azure_storage_acct_container, img_filename, img_filefullpath, x_ms_blob_content_type='image/jpeg')
        img_azureblob_url = azure_storage_acct_name + '.blob.core.windows.net/' + azure_storage_acct_container + '/' + img_filename
        logging.debug('Uploaded to http://%s', img_azureblob_url)
#       msg = Message(img_azureblob_url)
#       bus_service.send_queue_message(azure_servicebus_queue, msg)
        msg = 'info.newImageUploaded:' + img_azureblob_url
        subprocess.call(["./openiot-agent.bin", "-o", openiot_outbound, "-i", openiot_hardware_id, "-s", openiot_spec_id, "-a", msg])
        # Remove the image after uploading
        os.remove(img_filefullpath)
        # If we need to sleep for more than 1 second, sleep
#       if sleep_time > 1:
#           time.sleep(sleep_time - 1)

# REST ARE ALL TEST CODE

# List all blobs
Example #15
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure SDK for Python library.
        try:
            import azure
            from azure.storage import BlobService
        except ImportError:
            raise BackendException('Azure backend requires Microsoft Azure SDK for Python '
                                   '(https://github.com/Azure/azure-sdk-for-python).')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')

        account_name = os.environ['AZURE_ACCOUNT_NAME']
        account_key = os.environ['AZURE_ACCOUNT_KEY']
        self.WindowsAzureMissingResourceError = azure.WindowsAzureMissingResourceError
        self.blob_service = BlobService(account_name=account_name, account_key=account_key)
        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except azure.WindowsAzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-blob
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-blob
        blobs = self.blob_service.list_blobs(self.container)
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.WindowsAzureMissingResourceError):
            return log.ErrorCode.backend_not_found
Example #16
        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
        print self.process.returncode


#command = Command("echo 'Process started'; sleep 2; echo 'Process finished'")
#print command.run(timeout=3)
#print command.run(timeout=1)
#
#command = Command('ping www.google.com')
#print command.run(timeout=1)

AZURE_STORAGE_CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']

blob_service = BlobService(connection_string=AZURE_STORAGE_CONNECTION_STRING)

print blob_service.put_block_blob_from_path(
    'nexradl2',
    '201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    '/snfs9/q2/levelii_tarfiles/201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    max_connections=5,
)

blobs = blob_service.list_blobs('nexradl2', maxresults=10)
for blob in blobs:
    print(blob.name)
    print(blob.url)
Example #17
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name,
                                account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning(
            "AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name,
                                     prefix=path,
                                     delimiter="/",
                                     include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key,
                       filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key,
                                          filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key,
                                          fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_bytes(
            self.container_name,
            key,
            memstring,
            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self,
                             key,
                             filepath,
                             metadata=None,
                             multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_path(
            self.container_name,
            key,
            filepath,
            x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs",
                       container_name,
                       time.time() - start_time)
        return container_name
Example #18
class BlobServiceAdapter(Component):
    """The :class:`BlobServiceAdapter` class is a thin wrapper over azure.storage.BlobService.

    All the attributes of the wrapped blob service are proxied by the adapter, so
    it's possible to do ``adapter.create_container()`` instead of the long form
    ``adapter.blob_service.create_container()``.
    """

    def __init__(self):
        self.blob_service = BlobService(
            account_name=self.util.get_config("storage.azure.account_name"),
            account_key=self.util.get_config("storage.azure.account_key"),
            host_base=self.util.get_config("storage.azure.blob_service_host_base"),
        )

    def __getattr__(self, name):
        return getattr(self.blob_service, name)

    def create_container_in_storage(self, container_name, access="container"):
        """create a container if doesn't exist

        :type container_name: str|unicode
        :param container_name: Name of container to create.

        :type access: str|unicode
        :param access: Optional. Possible values include: container, blob
        :return: the result of create_container, True if the container already
            exists, or False on error.
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name not in names:
                return self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exists in storage")
                return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, container_name, blob_name, stream):
        """
        Creates a new block blob from a file/stream, or updates the content of
        an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str | unicode
        :param blob_name: Name of blob to create or update.

        :type stream: file
        :param stream: Opened file/stream to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_bytes(self, container_name, blob_name, blob):
        """
        Creates a new block blob from an array of bytes, or updates the content
        of an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type blob: bytes
        :param blob: Content of blob as an array of bytes.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_text(self, container_name, blob_name, text):
        """
        Creates a new block blob from str/unicode, or updates the content of an
        existing block blob, with automatic chunking and progress notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type text: str|unicode
        :param text: Text to upload to the blob.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_text(container_name, blob_name, text)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, container_name, blob_name, path):
        """
        Creates a new block blob from a file path, or updates the content of an
        existing block blob, with automatic chunking and progress notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type path: str|unicode
        :param path: Path of the file to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
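A hedged sketch of the __getattr__ proxying the docstring describes, assuming the host framework has already injected util and log into the Component:

adapter = BlobServiceAdapter()

# Proxied call: __getattr__ forwards make_blob_url to the wrapped BlobService.
url = adapter.make_blob_url("experiment", "results.txt")

# Wrapper call: defined on the adapter itself, with container auto-creation.
adapter.upload_file_to_azure_from_text("experiment", "results.txt", "hello")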
Example #19
def do_step(context):
    settings = context.meta['settings']
    index_file = context.meta['index-file']
    pivnetAPIToken = settings["pivnet-api-token"]

    with open("manifests/{0}".format(index_file)) as f:
        manifests = yaml.safe_load(f)

    eula_urls = [
        "https://network.pivotal.io/api/v2/products/{0}/releases/{1}/eula_acceptance"
        .format(m['release-name'], m['release-number'])
        for m in manifests['manifests']
    ]

    release_urls = [
        "https://network.pivotal.io/api/v2/products/{0}/releases/{1}/product_files/{2}/download"
        .format(m['release-name'], m['release-number'], m['file-number'])
        for m in manifests['manifests']
    ]

    stemcell_urls = [m['stemcell'] for m in manifests['manifests']]

    # accept eula for each product
    for url in eula_urls:
        print url
        if not "concourse" in url:
            res = authorizedPost(url, pivnetAPIToken)
            code = res.getcode()

    # releases
    is_release_file = re.compile(r"^releases/.+")
    if not os.path.exists("/tmp/releases"):
        os.makedirs("/tmp/releases")

    client = bosh_client.BoshClient("https://10.0.0.4:25555", "admin", "admin")
    storage_account_name = settings["STORAGE-ACCOUNT-NAME"]
    storage_access_key = settings["STORAGE-ACCESS-KEY"]

    blob_service = BlobService(storage_account_name, storage_access_key)
    blob_service.create_container(container_name='tempreleases',
                                  x_ms_blob_public_access='container')

    print "Processing releases."
    for url in release_urls:

        print "Downloading {0}.".format(url)

        if "concourse" in url:
            release_url = "https://s3-us-west-2.amazonaws.com/bosh-azure-releases/concourse.zip"
            res = urllib2.urlopen(release_url)
        else:
            res = authorizedPost(url, pivnetAPIToken)

        code = res.getcode()

        length = int(res.headers["Content-Length"])

        # content-length
        if code == 200:

            total = 0
            pcent = 0.0
            CHUNK = 16 * 1024

            with tempfile.TemporaryFile() as temp:
                while True:
                    chunk = res.read(CHUNK)
                    total += len(chunk)  # count the bytes actually read, not the chunk size
                    pcent = (float(total) / float(length)) * 100

                    sys.stdout.write("Download progress: %.2f%% (%.2fM)\r" %
                                     (pcent, total / 1000000.0))
                    sys.stdout.flush()

                    if not chunk:
                        break

                    temp.write(chunk)

                print "Download complete."

                z = zipfile.ZipFile(temp)
                for name in z.namelist():

                    # is this a release?
                    if is_release_file.match(name):

                        release_filename = "/tmp/{0}".format(name)

                        print "Unpacking {0}.".format(name)
                        z.extract(name, "/tmp")

                        print "Uploading {0} to Azure blob store".format(name)

                        blob_service.put_block_blob_from_path(
                            'tempreleases',
                            name,
                            "/tmp/{0}".format(name),
                            x_ms_blob_content_type='application/x-compressed')

                        os.unlink(release_filename)
                        blob_url = "http://{0}.blob.core.windows.net/{1}/{2}".format(
                            storage_account_name, 'tempreleases', name)

                        print "Uploading release {0} to BOSH director.".format(
                            name)

                        task_id = client.upload_release(blob_url)
                        client.wait_for_task(task_id)

                z.close()
                temp.close()

    blob_service.delete_container("tempreleases")

    # stemcells
    print "Processing stemcells."

    for url in stemcell_urls:
        print "Processing stemcell {0}".format(url)
        task_id = client.upload_stemcell(url)
        client.wait_for_task(task_id)

    return context
Example #20
def upload_to_blob(filename):
    # uploads to Azure Blob Storage
    blob_service = BlobService(account_name=storage_account_name, account_key=storage_account_key)
    blob_service.create_container('pubsubcat-pics')
    blob_service.put_block_blob_from_path("pubsubcat-pics", hostname + "/" + filename, 'temp/' + filename)
Example #21
class BlobServiceAdapter(Component):
    """The :class:`BlobServiceAdapter` class is a thin wrapper over azure.storage.BlobService.

    All the attributes of the wrapped blob service are proxied by the adapter, so
    it's possible to do ``adapter.create_container()`` instead of the long form
    ``adapter.blob_service.create_container()``.
    """

    def __init__(self):
        self.blob_service = BlobService(account_name=self.util.get_config("storage.azure.account_name"),
                                        account_key=self.util.get_config("storage.azure.account_key"),
                                        host_base=self.util.get_config("storage.azure.blob_service_host_base"))

    def __getattr__(self, name):
        return getattr(self.blob_service, name)

    def create_container_in_storage(self, container_name, access="container"):
        """create a container if doesn't exist

        :type container_name: str|unicode
        :param container_name: Name of container to create.

        :type access: str|unicode
        :param access: Optional. Possible values include: container, blob
        :return: the result of create_container, True if the container already
            exists, or False on error.
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name not in names:
                return self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exists in storage")
                return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, container_name, blob_name, stream):
        """
        Creates a new block blob from a file/stream, or updates the content of
        an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str | unicode
        :param blob_name: Name of blob to create or update.

        :type stream: file
        :param stream: Opened file/stream to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_bytes(self, container_name, blob_name, blob):
        """
        Creates a new block blob from an array of bytes, or updates the content
        of an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type blob: bytes
        :param blob: Content of blob as an array of bytes.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_text(self, container_name, blob_name, text):
        """
        Creates a new block blob from str/unicode, or updates the content of an
        existing block blob, with automatic chunking and progress notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type text: str|unicode
        :param text: Text to upload to the blob.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_text(container_name, blob_name, text)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, container_name, blob_name, path):
        """
        Creates a new block blob from a file path, or updates the content of an
        existing block blob, with automatic chunking and progress notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type path: str|unicode
        :param path: Path of the file to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
Example #22
class SAzure(SyncStorage):
    def __init__(self):
        super().__init__()
        self.msg_key_na = _('Key not available')
        try:
            import alxlib.key

            key = alxlib.key.Key()
            if os.path.isfile(key.get_path()):
                sys.path.insert(0, key.get_dir())

                import alxkey

                self.key = alxkey.alxkey_azure
                """self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'],
                                        account_key=self.key['AZURE_ACCESS_KEY'])"""
            else:
                # raise (self.msg_key_na)
                self.key = None
        except:
            pass
            # raise (self.msg_key_na)

    def connect(self):
        try:

            self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'],
                                    account_key=self.key['AZURE_ACCESS_KEY'])

            return self.blob.list_containers(maxresults=1)

        except:
            return None

    def connect_blob(self, az_account_name=None, az_account_key=None):

        try:
            if az_account_name is not None:
                self.key['AZURE_STORAGE_ACCOUNT_NAME'] = az_account_name
                self.key['AZURE_ACCESS_KEY'] = az_account_key

            return self.connect()

        except:
            return None

    def path_clean(self, path: str):
        try:
            i = path.index("//") + 2
            self.container = path[0:i]
            if path[len(path) - 1] != "/":
                path += "/"

            return path[i:]
        except:
            print(_("Bad Path"))
            exit(1)

    def spath(self, container, root, b):
        spath = SyncPath()
        spath.BasePath = container
        if b.name.endswith("/"):
            spath.IsDir = True
        else:
            spath.IsFile = True
        spath.AbsPath = b.name
        if len(root) > 0:
            spath.SPath = b.name[len(root) - 1:]
        else:
            spath.SPath = b.name
        spath.Size = b.properties.content_length
        import alxlib.time_help

        spath.ModifiedTS = alxlib.time_help.to_timestamp(b.properties.last_modified)
        spath.MD5 = b.properties.content_md5
        spath.sys = "azure"
        return spath

    def path_split(self, path: str):
        try:
            parts = path.split("/")
            container = parts[0]
            uri = ""
            if len(parts) > 1:
                uri = "/".join(map(str, parts[1:]))

            return container, uri
        except:
            print(_("Bad path"))
            exit(1)

    def path_list_blobs(self, container, uri):

        try:
            if len(uri) > 0:
                blobs = self.blob.list_blobs(container, prefix=uri)
            else:
                blobs = self.blob.list_blobs(container)

            """for blob in blobs:
                print(blob.properties.__dict__)
                print(blob.name)
                print(blob.url)"""
            return blobs
        except Exception as e:
            print(_("Bad connection"))
            logging.warning("container {0}, path {1}".format(container, uri))
            exit(1)

    def path_list(self, path):
        try:
            logging.debug("path_list {0}".format(path))

            container, uri = self.path_split(path)
            logging.debug("Container: {0}, Uri: {1}".format(container, uri))

            self.connect()
            self.blob.create_container(container)

            blobs = self.path_list_blobs(container, uri)

            d = {}

            for b in blobs:
                spath = self.spath(container, uri, b)
                # print(b.__dict__)
                #print(str(b.properties.last_modified.__dict__))
                #print(str(spath.ModifiedTS))
                d[spath.SPath] = spath
            # print(d)
            return d
        except Exception as e:
            print(e)

    def remove(self, src: SyncPath):
        try:
            logging.debug("Removing {0}".format(src.AbsPath))
            self.connect()
            self.blob.create_container(src.BasePath)
            self.blob.delete_blob(src.BasePath, src.AbsPath)
        except:
            pass


    def copy_local2azure(self, src, base_dir):
        try:

            container, uri = self.path_split(base_dir)

            if len(src.SPath) > 0 and src.SPath[0] == "/":
                path = uri + src.SPath[1:]
            else:
                path = uri + src.SPath
            logging.debug("copy_local2azure Spath {0}. path:{1}".format(src.SPath, path))
            self.connect()
            if not src.IsDir:
                self.blob.put_block_blob_from_path(container, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path + "/", "")
        except Exception as e:
            print("Error Copying")
            print(e)

    def copy_azure2local(self, src, base_dir):
        try:

            if len(src.SPath) > 0 and (src.SPath[0] == "/" or src.SPath[0] == "\\"):
                path = src.SPath[1:]
            else:
                path = src.SPath

            path = os.path.normpath(os.path.join(base_dir, path))
            logging.debug("copy_azure2local basedir:{0} Spath {1}, path {2}, abs: {3}".format(base_dir, src.SPath, path, src.AbsPath))

            if not os.path.isdir(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            # print(os.path.dirname(path) + "***************")

            if not src.AbsPath.endswith("/"):
                self.blob.get_blob_to_path(src.BasePath, src.AbsPath, path)




            """container, uri = self.path_split(base_dir)

            if len(src.SPath)>0 and src.SPath[0]=="/":
                path= uri+ src.SPath[1:]
            else:
                path= uri+src.SPath
            self.connect()
            if not src.IsDir:
                self.blob.get_blob_to_path(src.BasePath, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path, "")"""
        except Exception as e:
            print("Error copying")
            print(e)
Example #23
class FileService(Component):
    def __init__(self):
        self.blob_service = None

    def generate_blob_service(self):
        if self.blob_service is None:
            # if storage info doesn't exist in config.py, the upload-file functions stop working
            self.blob_service = BlobService(
                account_name=self.util.get_config(
                    "storage.azure.account_name"),
                account_key=self.util.get_config("storage.azure.account_key"),
                host_base=self.util.get_config(
                    "storage.azure.blob_service_host_base"))

    def create_container_in_storage(self, container_name, access):
        """
        create a container if doesn't exist
        :param container_name:
        :param access:
        :return:
        """
        self.generate_blob_service()
        try:
            names = [c.name for c in self.blob_service.list_containers()]
            if container_name not in names:
                self.blob_service.create_container(
                    container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exsit in storage")
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, stream, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_file(
                    container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name,
                                                       blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, path, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_path(
                    container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name,
                                                       blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
Example #24
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                import azure.storage.blob
                if hasattr(azure.storage.blob, 'BlobService'):
                    from azure.storage.blob import BlobService
                else:
                    from azure.storage.blob.blockblobservice import BlockBlobService as BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'])
            self._create_container()
        elif 'AZURE_SHARED_ACCESS_SIGNATURE' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'])
        else:
            raise BackendException(
                'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable is set.')

        if globals.azure_max_single_put_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_SINGLE_PUT_SIZE
                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size

        if globals.azure_max_block_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_BLOCK_SIZE
                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size

    def _create_container(self):
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        kwargs = {}
        if globals.azure_max_connections:
            kwargs['max_connections'] = globals.azure_max_connections

        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        try:
            self.blob_service.create_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)
        except AttributeError:  # Old versions use a different method name
            self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        try:
            info = {'size': int(prop.properties.content_length)}
        except AttributeError:
            # old versions directly returned the properties
            info = {'size': int(prop['content-length'])}
        return info

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
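
The marker loop in _list is the general pagination idiom for the legacy SDK: list_blobs returns one page of results plus a next_marker, and an empty marker means the listing is complete. A reusable generator form of the same idea, sketched against the old azure-storage API (blob_service and container are assumed to be configured already):

def iter_blobs(blob_service, container, prefix=None):
    """Yield every blob in `container`, following continuation markers."""
    marker = None
    while True:
        batch = blob_service.list_blobs(container, prefix=prefix, marker=marker)
        for blob in batch:
            yield blob
        if not batch.next_marker:  # an empty marker means no further pages
            break
        marker = batch.next_marker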
Exemple #25
0
class AzureIOStore(IOStore):
    """
    A class that lets you get input from and send output to Azure Storage.
    
    """
    
    def __init__(self, account_name, container_name, name_prefix=""):
        """
        Make a new AzureIOStore that reads from and writes to the given
        container in the given account, adding the given prefix to keys. All
        paths will be interpreted as keys or key prefixes.
        
        If the name prefix does not end with a trailing slash, and is not empty,
        one will be added automatically.
        
        Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
        variable or from the ~/.toilAzureCredentials file, as in Toil itself.
        
        """
        
        # Make sure azure libraries actually loaded
        assert(have_azure)
        
        self.account_name = account_name
        self.container_name = container_name
        self.name_prefix = name_prefix
        
        if self.name_prefix != "" and not self.name_prefix.endswith("/"):
            # Make sure it has the trailing slash required.
            self.name_prefix += "/"
        
        # Sneak into Toil and use the same keys it uses
        self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
            self.account_name)
            
        # This will hold our Azure blob store connection
        self.connection = None
        
    def __getstate__(self):
        """
        Return the state to use for pickling. We don't want to try and pickle
        an open Azure connection.
        """
     
        return (self.account_name, self.account_key, self.container_name, 
            self.name_prefix)
        
    def __setstate__(self, state):
        """
        Set up after unpickling.
        """
        
        self.account_name = state[0]
        self.account_key = state[1]
        self.container_name = state[2]
        self.name_prefix = state[3]
        
        self.connection = None
        
    def __connect(self):
        """
        Make sure we have an Azure connection, and set one up if we don't.
        """
        
        if self.connection is None:
            RealTimeLogger.get().debug("Connecting to account {}, using "
                "container {} and prefix {}".format(self.account_name,
                self.container_name, self.name_prefix))
        
            # Connect to the blob service where we keep everything
            self.connection = BlobService(
                account_name=self.account_name, account_key=self.account_key)
            
            
    def read_input_file(self, input_path, local_path):
        """
        Get input from Azure.
        """
        
        self.__connect()
        
        
        RealTimeLogger.get().debug("Loading {} from AzureIOStore".format(
            input_path))
        
        # Download the blob. This is known to be synchronous, although it can
        # call a callback during the process.
        self.connection.get_blob_to_path(self.container_name,
            self.name_prefix + input_path, local_path)
            
    def list_input_directory(self, input_path, recursive=False):
        """
        Loop over fake /-delimited directories on Azure. The prefix may or may
        not have a trailing slash; if not, one will be added automatically.
        
        Returns the names of files and fake directories in the given input fake
        directory, non-recursively.
        
        """
        
        self.__connect()
        
        RealTimeLogger.get().info("Enumerating {} from AzureIOStore".format(
            input_path))
        
        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path
        
        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"
        
        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None
        
        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()
        
        while True:
        
            # Get the results from Azure. We skip the delimiter since it doesn't
            # seem to have the placeholder entries it's supposed to.
            result = self.connection.list_blobs(self.container_name, 
                prefix=fake_directory, marker=marker)
                
            for blob in result:
                # Yield each result's blob name, but directory names only once
                
                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]
                
                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)
                    
                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)
                        
                        yield subdirectory
                else:
                    # We found an actual file  
                    yield relative_path
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
    
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """
        
        self.__connect()
        
        RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
            output_path))
        
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        
        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        self.connection.put_block_blob_from_path(self.container_name,
            self.name_prefix + output_path, local_path)
            
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return True
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return False
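
A hedged round-trip sketch of the store, assuming the Toil credential lookup succeeds and the container already exists; every account, container, and path name below is a placeholder.

# Hypothetical usage; all names are placeholders.
store = AzureIOStore("myaccount", "mycontainer", name_prefix="runs/42")

store.write_output_file("/tmp/result.txt", "outputs/result.txt")

for name in store.list_input_directory("outputs"):
    print(name)  # yields "result.txt" plus any fake subdirectories

if store.exists("outputs/result.txt"):
    store.read_input_file("outputs/result.txt", "/tmp/copy.txt")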
Exemple #26
0
from azure.storage import BlobService

bs = BlobService(
    account_name='xhpi',
    account_key=
    'LdfaFhlJpL8xJE3iFhgExDU8VRmA6s5M7b2s0Ztd2CrxX+6MVsFTHilHOJ3TuUCffluyaOgP7jYj8Na5J8g3+A=='
)

bs.put_block_blob_from_path('xhpicloud', 'pic.jpg',
                            '/home/pi/blobfile/pic.jpg')

bs.put_block_blob_from_path('xhpicloud', 'picpu.txt',
                            '/home/pi/blobfile/picpu.txt')

bs.put_block_blob_from_path('xhpicloud', 'envtemp.txt',
                            '/home/pi/blobfile/envtemp.txt')
Exemple #27
0
import json
import os

from PIL import Image

# NOTE: this fragment assumes `args` (parsed argparse options) and
# `blob_service` (a configured BlobService client) are set up earlier
# in the original script.
container_name = 'matchfeatures'

with open(args.importRules, 'r') as infile:
    images = json.load(infile)

basepath = os.path.dirname(args.importRules)
os.chdir(basepath)

for image in images:
    idx = 0
    for filename in image['files']:
        # First upload the full-res images
        basename = str(image['pk']) + "-" + str(idx)
        name = basename + ".jpg"
        print("Uploading " + filename + " to " + name)
        blob_service.put_block_blob_from_path(
            container_name, name, filename,
            x_ms_blob_content_type='image/jpeg')

        # Create and upload thumbnails
        size = 256, 256

        im = Image.open(filename)
        im.thumbnail(size, Image.ANTIALIAS)
        im.save("tmp-tn.jpg", "JPEG")
        name = basename + "-tn.jpg"
        blob_service.put_block_blob_from_path(
            container_name,
            name,
            "tmp-tn.jpg",
            x_ms_blob_content_type='image/jpeg')

        idx += 1
Exemple #28
0
print("-----------------------")

if not os.path.exists(presen_path): raise IOError(presen_path + " not found")
if not os.path.exists(os.path.join(presen_path, "img")): raise IOError("img directory not found")
if not os.path.exists(os.path.join(presen_path, "css")): raise IOError("css directory not found")

for dirpath, dirnames, filenames in os.walk(presen_path):
    for filename in filenames:
        targetfile = os.path.join(dirpath, filename)
        if ".gitignore" in targetfile: continue
        blobname = re.sub(presen_path + r"\\", \
          "servicebuilding/servicebuilding%s/presentation/" % id, \
          targetfile)\
          .replace("\\", "/")

        print("%s -> %s" % (targetfile, blobname), end="")
        try:
            blob_service = BlobService(\
              "welmokpilog",\
              "=LfXCQBPcj4u313vfz+mx+pGC2fWwnhAo+2UW5SVAnAqIjYBEPt76oievOM3LpV35BwYCYi6ufeSBRZCs/h3c8Q==")
            blob_service.put_block_blob_from_path(\
              "test-data-resources",\
              blobname,\
              targetfile)
        except:
            print("    [ERROR]")
            print("Unexpected error : ", sys.exc_info()[0])
            raise
        print("    [SUCCEED]")
print("finish move data.")
class AzureBackend(duplicity.backend.Backend):
    u"""
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, u'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                import azure.storage.blob
                if hasattr(azure.storage.blob, u'BlobService'):
                    from azure.storage.blob import BlobService
                else:
                    from azure.storage.blob.blockblobservice import BlockBlobService as BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException(u"""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        # TODO: validate container name
        self.container = parsed_url.path.lstrip(u'/')

        if u'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException(u'AZURE_ACCOUNT_NAME environment variable not set.')

        if u'AZURE_ACCOUNT_KEY' in os.environ:
            if u'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ[u'AZURE_ACCOUNT_KEY'],
                                                endpoint_suffix=os.environ[u'AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ[u'AZURE_ACCOUNT_KEY'])
            self._create_container()
        elif u'AZURE_SHARED_ACCESS_SIGNATURE' in os.environ:
            if u'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ[u'AZURE_SHARED_ACCESS_SIGNATURE'],
                                                endpoint_suffix=os.environ[u'AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ[u'AZURE_SHARED_ACCESS_SIGNATURE'])
        else:
            raise BackendException(
                u'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable is set.')

        if globals.azure_max_single_put_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_SINGLE_PUT_SIZE
                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size

        if globals.azure_max_block_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_BLOCK_SIZE
                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size

    def _create_container(self):
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError(u"Could not create Azure container: %s"
                           % str(e.message).split(u'\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        remote_filename = fsdecode(remote_filename)
        kwargs = {}
        if globals.azure_max_connections:
            kwargs[u'max_connections'] = globals.azure_max_connections

        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        try:
            self.blob_service.create_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)
        except AttributeError:  # Old versions use a different method name
            self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)

        self._set_tier(remote_filename)

    def _set_tier(self, remote_filename):
        if globals.azure_blob_tier is not None:
            try:
                self.blob_service.set_standard_blob_tier(self.container, remote_filename, globals.azure_blob_tier)
            except AttributeError:  # might not be available in old API
                pass

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, fsdecode(remote_filename), local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, fsdecode(filename))

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, fsdecode(filename))
        try:
            info = {u'size': int(prop.properties.content_length)}
        except AttributeError:
            # old versions directly returned the properties
            info = {u'size': int(prop[u'content-length'])}
        return info

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
Exemple #30
0
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
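
Because the constructor forces the prefix to start with a slash, callers work purely in logical keys and let format_key_for_backend/format_key_from_backend do the mapping. A usage sketch under that assumption (BaseTransfer is taken to provide those helpers; credentials and names are placeholders):

# Hypothetical usage; credentials and names are placeholders.
transfer = AzureTransfer("myaccount", "base64key==", "backups", prefix="site1")

transfer.store_file_from_disk("db/latest.dump", "/tmp/latest.dump",
                              metadata={"tier": "daily"})

data, metadata = transfer.get_contents_to_string("db/latest.dump")
print(len(data), metadata)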
Exemple #31
0
if not os.path.exists(presen_path): raise IOError(presen_path + " not found")
if not os.path.exists(os.path.join(presen_path, "img")):
    raise IOError("img directory not found")
if not os.path.exists(os.path.join(presen_path, "css")):
    raise IOError("css directory not found")

for dirpath, dirnames, filenames in os.walk(presen_path):
    for filename in filenames:
        targetfile = os.path.join(dirpath, filename)
        if ".gitignore" in targetfile: continue
        blobname = re.sub(presen_path + r"\\", \
          "servicebuilding/servicebuilding%s/presentation/" % id, \
          targetfile)\
          .replace("\\", "/")

        print("%s -> %s" % (targetfile, blobname), end="")
        try:
            blob_service = BlobService(\
              "welmokpilog",\
              "=LfXCQBPcj4u313vfz+mx+pGC2fWwnhAo+2UW5SVAnAqIjYBEPt76oievOM3LpV35BwYCYi6ufeSBRZCs/h3c8Q==")
            blob_service.put_block_blob_from_path(\
              "test-data-resources",\
              blobname,\
              targetfile)
        except:
            print("    [ERROR]")
            print("Unexpected error : ", sys.exc_info()[0])
            raise
        print("    [SUCCEED]")
print("finish move data.")
Exemple #32
0
from azure.storage import BlobService

bs = BlobService(account_name='xhpi',
                 account_key='LdfaFhlJpL8xJE3iFhgExDU8VRmA6s5M7b2s0Ztd2CrxX+6MVsFTHilHOJ3TuUCffluyaOgP7jYj8Na5J8g3+A==')

bs.put_block_blob_from_path('xhpicloud', 'pic.jpg', '/home/pi/blobfile/pic.jpg')

bs.put_block_blob_from_path('xhpicloud', 'picpu.txt', '/home/pi/blobfile/picpu.txt')

bs.put_block_blob_from_path('xhpicloud', 'envtemp.txt', '/home/pi/blobfile/envtemp.txt')
from azure.storage import BlobService 

mycontainer = 'images'
myblob = 'python.jpg'
myfile = 'c:\\demo\\AzureStorageSDK\\AzureStorageSDKDemo.jpg'

blob_service = BlobService(
    account_name='dev018storage', 
    account_key='q4dLJhW3zLw8hmdjL88w0vc2KHrbjMJlXumFB0eZVpBoeks1MKlZh7+pW36fmlWmBMx+bzwvWcYMU8p4MGhbcg=='
)

blob_service.put_block_blob_from_path(
    mycontainer,
    myblob,
    myfile,
    x_ms_blob_content_type='image/jpeg'  # the uploaded blob is a JPEG
)

print("\n\"" + myblob + "\" Uploaded by Python!\n")