Beispiel #1
0
    def open_stack_connection(
        self, storage
    ):
        """Build a libcloud OpenStack driver.

        The ``storage`` flag selects the driver family returned:
        truthy -> a Swift object-storage driver, falsy -> a compute driver.
        """
        USER = '******'
        API_KEY = 'q4HknWgHiazLyaGw'
        AUTH_URL = 'http://128.136.179.2:5000'
        # IMPORTANT: local directory for the security certificate should
        # be changed to suit the deployment.
        libcloud.security.CA_CERTS_PATH = ['E:/School/ca-bundle.crt']

        if storage:
            from libcloud.storage.types import Provider
            from libcloud.storage.providers import get_driver
            provider = get_driver(Provider.OPENSTACK_SWIFT)
        else:
            from libcloud.compute.types import Provider
            from libcloud.compute.providers import get_driver
            provider = get_driver(Provider.OPENSTACK)

        # Driver connection established here.
        return provider(
            USER,
            API_KEY,
            ex_force_auth_url=AUTH_URL,
            ex_force_auth_version='2.0_password',
            ex_tenant_name='cormac.finnegan',
            ex_force_service_region='RegionOne',
        )
Beispiel #2
0
def put_s3(i, args):
    """Worker greenlet: drain the module-level ``queue`` of Cloud Files
    objects and copy each one into the configured S3 bucket via a
    streaming upload.

    :param i: worker index, used only as a log prefix.
    :param args: parsed CLI options carrying Cloud Files and S3
                 credentials, region, and container/bucket names.
    """
    print 'Thread %4d: Start' % i
    # Source: Rackspace Cloud Files container.
    cf_driver = get_driver(Provider.CLOUDFILES_US)
    cf = cf_driver(args.cf_username, args.cf_password,
                   ex_force_service_region=args.cf_region)
    cf_cont = cf.get_container(args.cf_container)

    # Destination: Amazon S3 bucket.
    s3_driver = get_driver(Provider.S3)
    s3 = s3_driver(args.s3_access_id, args.s3_access_key)
    s3_cont = s3.get_container(args.s3_container)

    while 1:
        try:
            obj = queue.get_nowait()
        except gevent.queue.Empty:
            # Queue drained: terminate this greenlet.
            print 'Thread %4d: Queue empty' % i
            raise gevent.GreenletExit
        else:
            # Re-bind the object to the Cloud Files driver/container so
            # as_stream() reads from the source account.
            obj.driver = cf
            obj.container = cf_cont

            print 'Thread %4d: Upload %s' % (i, obj.name)
            obj_stream = obj.as_stream()
            s3_cont.upload_object_via_stream(obj_stream, obj.name,
                                             extra=obj.extra)
            print 'Thread %4d: Upload complete %s' % (i, obj.name)
    # NOTE(review): unreachable — the loop only exits by raising
    # GreenletExit above.
    print 'Thread %4d: Complete' % i
def get_driver_helper(provider_name):
    """Map a provider name string ('cloudfiles' or 's3') to the matching
    libcloud storage driver class; exit the process on anything else."""
    if provider_name == 'cloudfiles':
        return get_driver(Provider.CLOUDFILES_US)
    elif provider_name == 's3':
        return get_driver(Provider.S3)
    else:
        # An unknown provider is fatal for this CLI tool.
        print 'Unknown provider %s!' % provider_name
        sys.exit(1)
Beispiel #4
0
def get_driver_helper(provider_name):
    """Return the libcloud storage driver class for 'cloudfiles' or 's3';
    any other provider name terminates the process with exit code 1."""
    if provider_name == 'cloudfiles':
        return get_driver(Provider.CLOUDFILES_US)
    elif provider_name == 's3':
        return get_driver(Provider.S3)
    else:
        # Fail fast: no sensible fallback driver exists.
        print 'Unknown provider %s!' % provider_name
        sys.exit(1)
Beispiel #5
0
 def _azure_client(self) -> Container:
     """Return the configured Azure blob container via a freshly built
     libcloud storage driver."""
     driver_cls = get_driver(self._provider)
     connection = driver_cls(
         self._account,
         self._key,
         host=self._host,
         secure=self._secure,
     )
     return connection.get_container(self._container)
Beispiel #6
0
def html(revision_id, path="index.html"):
    """Serve a built HTML artifact for *revision_id* from Rackspace
    Cloud Files.

    :param revision_id: revision identifier forming the object prefix.
    :param path: file path inside the revision (default ``index.html``).
    :returns: ``(body, 200, headers)`` where headers are the object's
        extra metadata with underscores mapped back to dashes.
    :raises: aborts the request with 404 when the object does not exist.
    """
    container = Container(
        app.config["BUCKET"],
        None,
        get_driver(Provider.CLOUDFILES)(
            app.config["RACKSPACE_USER"],
            app.config["RACKSPACE_APIKEY"],
            region=app.config["RACKSPACE_REGION"],
        ),
    )

    # See if the requested file exists
    try:
        obj = container.get_object(
            "html/{revision}/{path}".format(revision=revision_id, path=path)
        )
    except ObjectDoesNotExistError:
        abort(404)

    # Get the requested file. Join the streamed chunks once instead of
    # the original quadratic `bytes +=` accumulation loop.
    data = b"".join(obj.as_stream())

    return data, 200, {k.replace("_", "-"): v for k, v in obj.extra.items()}
Beispiel #7
0
def upload_file(file_path):
    """
    Upload a coverage file to S3 under a branch/commit-prefixed object
    name suffixed with a timestamp and a random number.

    :param file_path: Path to the coverage file to upload.
    :raises ValueError: if the file does not exist.
    """
    if not os.path.isfile(file_path):
        raise ValueError("File %s doesn't exist" % (file_path))

    print("Uploading coverage file to S3")

    driver = get_driver(Provider.S3)(
        ACCESS_KEY_ID, ACCESS_KEY_SECRET, region="us-east-1"
    )

    # Attach timestamp + random number to the name to avoid conflicts.
    object_name = "%s/%s/%s.%s.%s" % (
        CIRCLE_CI_BRANCH,
        CIRCLE_CI_COMMIT,
        os.path.basename(file_path),
        str(int(time.time())),
        random.randint(0, 100000),
    )

    container = driver.get_container(container_name=BUCKET_NAME)
    obj = container.upload_object(file_path=file_path, object_name=object_name)

    print(("Object uploaded to: %s/%s" % (BUCKET_NAME, object_name)))
    print(obj)
Beispiel #8
0
    def __init__(self, storage_info: StorageInfo):
        """Build a libcloud storage driver and working container from
        *storage_info*.

        Resolves provider aliases, validates the provider id, sets up a
        temporary scratch directory, and fetches (or optionally creates)
        the target bucket.

        :raises IndexError: when the provider string does not map to a
            known driver.
        """
        # Resolve aliases unconditionally so provider_id is always bound:
        # the original only assigned it when a provider was given, making
        # the get_driver() call below fail with NameError otherwise.
        provider_id = PROVIDER_ALIASES.get(
            storage_info.provider,
            storage_info.provider
        )
        if provider_id not in DRIVERS:
            raise IndexError('Invalid object storage provider string.')

        self.driver = get_driver(provider_id)(
            storage_info.key,
            storage_info.secret
        )

        # Scratch space for transfers; removed when the object is GC'd.
        self.temporary_dir = tempfile.TemporaryDirectory()

        try:
            self.container = self.driver.get_container(
                container_name=storage_info.bucket
            )
        except ContainerDoesNotExistError:
            if storage_info.create_container:
                self.container = self.driver.create_container(
                    container_name=storage_info.bucket
                )
            else:
                # Caller opted out of auto-creation; leave unset.
                self.container = None
Beispiel #9
0
def upload_file(file_path):
    """Upload a pricing data file plus its .sha256/.sha512 checksum files
    to the configured S3 bucket, keyed by their base file names.

    :param file_path: path to the primary data file; its checksum files
        are expected next to it with ``.sha256``/``.sha512`` suffixes.
    :raises ValueError: if the primary file does not exist.
    """
    if not os.path.isfile(file_path):
        raise ValueError("File %s doesn't exist" % (file_path))

    print("Uploading pricing data files to S3")

    cls = get_driver(Provider.S3)
    driver = cls(ACCESS_KEY_ID, ACCESS_KEY_SECRET, region=AWS_REGION)

    # Fetch the container once instead of once per uploaded file.
    container = driver.get_container(container_name=BUCKET_NAME)

    file_paths = [
        file_path,
        "%s.sha256" % (file_path),
        "%s.sha512" % (file_path),
    ]

    # Distinct loop variable: the original shadowed the parameter.
    for path in file_paths:
        object_name = os.path.basename(path)
        obj = container.upload_object(file_path=path,
                                      object_name=object_name)

        print(("Object uploaded to: %s/%s" % (BUCKET_NAME, object_name)))
        print(obj)
Beispiel #10
0
    def __enter__(self):
        """Prepare the environment and endpoint for spawning the AWS CLI.

        Copies the current process environment, overlays AWS profile,
        shared-credentials file and region settings from ``self._config``,
        resolves the aws CLI executable, and computes the custom endpoint
        URL when a host is configured.  Returns self (context manager).
        """
        self._env = os.environ.copy()

        if self._config.api_profile:
            self._env['AWS_PROFILE'] = self._config.api_profile

        if self._config.key_file:
            self._env['AWS_SHARED_CREDENTIALS_FILE'] = self._config.key_file

        # An explicit, non-default region always wins.
        if self._config.region and self._config.region != "default":
            self._env['AWS_REGION'] = self._config.region
        elif self._config.storage_provider not in [Provider.S3, "s3_compatible"] and self._config.region == "default":
            # Legacy libcloud S3 providers that were tied to a specific region such as s3_us_west_oregon
            self._env['AWS_REGION'] = get_driver(self._config.storage_provider).region_name

        # 'dynamic' means the CLI command is resolved at runtime by cmd().
        if self._config.aws_cli_path == 'dynamic':
            self._aws_cli_cmd = self.cmd()
        else:
            self._aws_cli_cmd = [self._config.aws_cli_path]

        self.endpoint_url = None
        if self._config.host is not None:
            # host[:port]; scheme is chosen by the 'secure' flag below.
            self.endpoint_url = '{}:{}'.format(self._config.host, self._config.port) \
                if self._config.port is not None else self._config.host
            if utils.evaluate_boolean(self._config.secure):
                self.endpoint_url = 'https://{}'.format(self.endpoint_url)
            else:
                self.endpoint_url = 'http://{}'.format(self.endpoint_url)

        return self
Beispiel #11
0
    def upload_obj(self, audio_file_path, metadata):
        """Uploads a file into Amazon S3 object storage.

        Before a file is uploaded onto Amazon S3 we generate a unique object
        name consisting of the filename and a unique string using the uuid4
        module.

        Keyword arguments:
            audio_file_path: Path on disk to the audio file that is about to be
                             uploaded to Amazon S3 object storage.
            metadata: ID3 tags and other metadata extracted from the audio file.

        Returns:
            The metadata dictionary it received with four new keys:
                filesize: The file's filesize in bytes.
                filename: The file's filename.
                resource_id: The unique object name used to identify the objects
                             on Amazon S3
                storage_backend: Name of the storage backend used.
        """

        file_base_name = os.path.basename(audio_file_path)
        file_name, extension = os.path.splitext(file_base_name)

        # With Amazon S3 you cannot create a signed url if there are spaces
        # in the object name. URL encoding the object name doesn't solve the
        # problem. As a solution we will replace spaces with dashes.
        file_name = file_name.replace(" ", "-")
        object_name = "%s_%s%s" % (file_name, str(uuid.uuid4()), extension)

        provider_driver_class = get_driver(getattr(Provider, self._provider))
        driver = provider_driver_class(self._api_key, self._api_key_secret)

        # Fetch the bucket, creating it on first use.
        try:
            container = driver.get_container(self._bucket)
        except ContainerDoesNotExistError:
            container = driver.create_container(self._bucket)

        extra = {'meta_data': {'filename': file_base_name}}

        # verify_hash=False: skip checksum verification of the upload.
        obj = driver.upload_object(file_path=audio_file_path,
                                   container=container,
                                   object_name=object_name,
                                   verify_hash=False,
                                   extra=extra)

        metadata["filesize"] = os.path.getsize(audio_file_path)

        # Remove file from organize directory
        try:
            os.remove(audio_file_path)
        except OSError:
            # Best-effort cleanup; a leftover file is only logged.
            logging.info("Could not remove %s from organize directory" %
                         audio_file_path)

        # Pass original filename to Airtime so we can store it in the db
        metadata["filename"] = file_base_name

        metadata["resource_id"] = object_name
        metadata["storage_backend"] = self._storage_backend
        return metadata
 def __init__(self, provider_name, option=None):
     """Create a libcloud storage driver for the named entry in
     settings.LIBCLOUD_PROVIDERS.

     :param provider_name: key into settings.LIBCLOUD_PROVIDERS.
     :param option: unused; kept for interface compatibility.
     :raises ImproperlyConfigured: if the provider entry is missing or
         the driver cannot be created.
     """
     self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
     if not self.provider:
         raise ImproperlyConfigured(
             'LIBCLOUD_PROVIDERS %s not defined or invalid' % provider_name)
     try:
         Driver = get_driver(self.provider['type'])

         # BUG FIX: was `self.provide.get(...)` — AttributeError at runtime.
         user = self.provider.get('user', None)
         key = self.provider.get('key', None)

         if provider_name == 'google' and not user:
             # Google service accounts authenticate with key/secret/project.
             self.driver = Driver(
                 key,
                 secret=self.provider['secret'],
                 project=self.provider['project']
                 )
         else:
             self.driver = Driver(
                 user,
                 key,
                 )
     except Exception as e:
         # BUG FIX: the original formatted two values into one %s, which
         # raised a TypeError that masked the real error.
         raise ImproperlyConfigured(
             "Unable to create libcloud driver type %s: %s" %
             (self.provider.get('type'), e))
Beispiel #13
0
 def _get_storage(uri):
     """
     Given a URI like local:///srv/repo or s3://key:secret/apt.example.com,
     return a libcloud storage or container object.
     """
     driver = get_driver(uri.scheme)
     key = uri.username
     secret = uri.password
     container = uri.netloc
     if uri.scheme.startswith('s3'):
         # Fall back to the standard AWS environment variables when the
         # URI does not embed credentials.
         if not key:
             key = os.environ.get('AWS_ACCESS_KEY_ID')
         if not secret:
             secret = os.environ.get('AWS_SECRET_ACCESS_KEY')
         if not (key and secret and container):
             raise ValueError('For S3 you must provide an access key ID, secret access key, and bucket name')
     elif uri.scheme == 'local':
         # For local storage, 'key' becomes the parent directory and
         # 'container' the leaf directory of the resolved path.
         parts = []
         if uri.netloc:
             parts.append(uri.netloc)
         if uri.path:
             parts.append(uri.path)
         if not parts:
             parts.append('.')
         base_path = os.path.abspath(''.join(parts))
         key = os.path.dirname(base_path)
         container = os.path.basename(base_path)
     storage = driver(key, secret)
     # Create the container on first use.
     try:
         return storage.get_container(container)
     except ContainerDoesNotExistError:
         return storage.create_container(container)
Beispiel #14
0
    def handle_noargs(self, **options):
        """Archive mix audio files to Azure blob storage.

        Iterates unarchived mixes (least-played first), streams each local
        file into the configured Azure container, records the archive URL
        on the model, and moves the local file into the archived media
        directory.
        """
        try:
            cls = get_driver(Provider.AZURE_BLOBS)
            driver = cls(settings.AZURE_ACCOUNT_NAME, settings.AZURE_ACCOUNT_KEY)
            container = driver.get_container(container_name=settings.AZURE_CONTAINER)

            # .filter(upload_date__lte=datetime.today() - timedelta(days=180)) \
            mixes = (
                Mix.objects.exclude(archive_path__isnull=False)
                .annotate(num_plays=Count("activity_plays"))
                .order_by("num_plays")
            )
            for mix in mixes:
                # Skip mixes whose local file is already gone.
                if os.path.isfile(mix.get_absolute_path()):
                    print "Uploading file for: %s" % mix.slug
                    file_name = "%s.%s" % (mix.uid, mix.filetype)
                    archive_path = url_path_join(settings.AZURE_ITEM_BASE_URL, settings.AZURE_CONTAINER, file_name)

                    with open(mix.get_absolute_path(), "rb") as iterator:
                        obj = driver.upload_object_via_stream(
                            iterator=iterator, container=container, object_name=file_name
                        )
                        print "Uploaded"
                        # Persist the archive URL only after a successful upload.
                        mix.archive_path = archive_path
                        mix.save()

                        # Move the local file into the archived directory.
                        expired_path = join(settings.MEDIA_ROOT, "mixes/archived")
                        new_file = os.path.join(expired_path, basename(iterator.name))
                        os.rename(iterator.name, new_file)

                    print "done- file is %s" % mix.archive_path

        except Exception, ex:
            # NOTE(review): broad catch-and-print loses the traceback.
            print "Debug exception: %s" % ex.message
Beispiel #15
0
 def get_driver_by_region(self, region_name):
     """Instantiate (and cache on self) an S3 driver bound to the given
     AWS region, using the stored access credentials."""
     driver_cls = get_driver(Provider.S3)
     self.s3_driver = driver_cls(
         self.credentials["accessId"],
         self.credentials["secretKey"],
         region=region_name,
     )
     return self.s3_driver
    def __init__(self, provider_name=None, option=None):
        """Initialise a libcloud storage backend from Django settings.

        :param provider_name: key into settings.LIBCLOUD_PROVIDERS; falls
            back to settings.DEFAULT_LIBCLOUD_PROVIDER or 'default'.
        :param option: unused; kept for interface compatibility.
        :raises ImproperlyConfigured: for a missing/invalid provider entry
            or any driver construction failure.
        """
        if provider_name is None:
            provider_name = getattr(settings, 'DEFAULT_LIBCLOUD_PROVIDER', 'default')

        self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
        if not self.provider:
            raise ImproperlyConfigured(
                'LIBCLOUD_PROVIDERS %s not defined or invalid' % provider_name)
        extra_kwargs = {}
        if 'region' in self.provider:
            extra_kwargs['region'] = self.provider['region']
        try:
            provider_type = self.provider['type']
            # Accept a dotted-path string such as
            # 'libcloud.storage.types.Provider.S3' and resolve its tag.
            # FIX: use str — `basestring` is Python 2 only; the sibling
            # implementations in this codebase already use str.
            if isinstance(provider_type, str):
                module_path, tag = provider_type.rsplit('.', 1)
                if module_path != 'libcloud.storage.types.Provider':
                    raise ValueError("Invalid module path")
                provider_type = getattr(Provider, tag)

            Driver = get_driver(provider_type)
            self.driver = Driver(
                self.provider['user'],
                self.provider['key'],
                **extra_kwargs
            )
        except Exception as e:
            raise ImproperlyConfigured(
                "Unable to create libcloud driver type %s: %s" % \
                (self.provider.get('type'), e))
        self.bucket = self.provider['bucket']   # Limit to one container
Beispiel #17
0
    def __init__(self, config=None):
        """Initialise a libcloud storage backend from a config mapping.

        :param config: mapping with at least 'type', 'user', 'key',
            'bucket' and 'base_url'; 'region' is optional.
        :raises ImproperlyConfigured: when config is missing or the
            driver cannot be created.
        """
        # FIX: the default config=None previously crashed below with an
        # opaque `TypeError: argument of type 'NoneType' is not iterable`.
        if not config:
            raise ImproperlyConfigured(
                'No libcloud provider configuration given')
        self.provider = config
        extra_kwargs = {}
        if 'region' in self.provider:
            extra_kwargs['region'] = self.provider['region']
        try:
            provider_type = self.provider['type']
            # Accept the dotted-path string form and resolve its tag.
            if isinstance(provider_type, str):
                module_path, tag = provider_type.rsplit('.', 1)
                if module_path != 'libcloud.storage.types.Provider':
                    raise ValueError("Invalid module path")
                provider_type = getattr(Provider, tag)

            Driver = get_driver(provider_type)
            self.driver = Driver(
                self.provider['user'],
                self.provider['key'],
                **extra_kwargs
            )
        except Exception as e:
            raise ImproperlyConfigured(
                "Unable to create libcloud driver type %s: %s" %
                (self.provider.get('type'), e))
        self.bucket = self.provider['bucket']   # Limit to one container
        self.base_url = self.provider['base_url']
Beispiel #18
0
    def __init__(self, provider_name=None, option=None):
        """Resolve a provider entry from settings.LIBCLOUD_PROVIDERS and
        build the matching libcloud storage driver.

        :raises ImproperlyConfigured: for a missing entry or any failure
            while constructing the driver.
        """
        if provider_name is None:
            provider_name = getattr(settings, 'DEFAULT_LIBCLOUD_PROVIDER',
                                    'default')

        self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
        if not self.provider:
            raise ImproperlyConfigured(
                'LIBCLOUD_PROVIDERS %s not defined or invalid' % provider_name)
        try:
            provider_type = self.provider['type']
            if isinstance(provider_type, str):
                # Dotted string form: 'libcloud.storage.types.Provider.X'
                prefix, tag = provider_type.rsplit('.', 1)
                if prefix != 'libcloud.storage.types.Provider':
                    raise ValueError("Invalid module path")
                provider_type = getattr(Provider, tag)

            driver_cls = get_driver(provider_type)
            self.driver = driver_cls(
                self.provider['user'],
                self.provider['key'],
            )
        except Exception as e:
            raise ImproperlyConfigured(
                "Unable to create libcloud driver type %s: %s" % (
                    self.provider.get('type'), e))
        self.bucket = self.provider['bucket']  # Limit to one container
Beispiel #19
0
    def driver(self):
        """Instantiate the cloud driver for ``self._cloud_name`` with the
        parameters from ``get_driver_params()``.

        :raises ValueError: if the driver class or its parameters cannot
            be resolved (the original cause is chained).
        """
        try:
            cloud_driver = get_driver(self._cloud_name)
            param_dict = self.get_driver_params()
        except Exception as exc:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); chain the cause so the real
            # failure is not lost.
            raise ValueError(
                'Could not build driver for %s' % self._cloud_name) from exc

        return cloud_driver(**param_dict)
Beispiel #20
0
def _get_storage_service() -> Container:
    """Return the storage container, creating it on first use.

    Builds the provider driver from the module-level STORAGE_* settings
    and falls back to fetching the container when it already exists.
    """
    provider = getattr(Provider, STORAGE_PROVIDER)
    storage_driver = get_driver(provider)(STORAGE_KEY, STORAGE_SECRET)
    try:
        return storage_driver.create_container(STORAGE_CONTAINER)
    except ContainerAlreadyExistsError:
        return storage_driver.get_container(STORAGE_CONTAINER)
Beispiel #21
0
def openqrm_lc_get_connection(params):
    """Open a libcloud connection for the provider named in *params*.

    :param params: mapping with 'PROVIDER' plus matching credential keys
        (ACCESS_ID/SECRET_KEY for EC2, USERNAME/PASSWORD for OpenStack).
    :returns: a connected driver instance, or None when the provider is
        not one of the recognised names.
    """
    provider = params['PROVIDER']
    if provider == 'EC2_EU_WEST':
        driver_cls = get_driver(Provider.EC2_EU_WEST)
        return driver_cls(params['ACCESS_ID'], params['SECRET_KEY'])
    elif provider == 'EC2_US_EAST':
        driver_cls = get_driver(Provider.EC2_US_EAST)
        return driver_cls(params['ACCESS_ID'], params['SECRET_KEY'])
    elif provider == 'OPENSTACK':
        driver_cls = get_driver(Provider.OPENSTACK_SWIFT)
        # BUG FIX: the service-type kwarg was misspelled
        # 'eex_force_service_type'; the driver expects
        # ex_force_service_type.
        return driver_cls(params['USERNAME'], params['PASSWORD'],
                          ex_force_auth_url='http://192.168.0.1:5000/v2.0',
                          ex_force_service_name='swift',
                          ex_force_service_type='object-store')
Beispiel #22
0
def main():
    """Exercise GroupDriver object deletion across multiple storage
    providers: build a driver per provider, group a subset of them, and
    delete one object from the group.

    NOTE(review): credentials are hard-coded below and should be moved to
    configuration/environment before any real use.
    """
    Ali = get_driver(Provider.ALIYUN_STORAGE)
    driver_ali = Ali("hqxxyywptpn3juer4zd5rods",
                     "WfUMI6vw28r0GD4gwPtNRpS/unU=")

    Azure = get_driver(Provider.WINDOWS_AZURE_STORAGE)
    driver_azure_us = Azure(
        "mlibcloud",
        "qdLKg2Cu1cWOkItbqb6gTl1WcOxvA9ED3fPo1KbSwdKw9ApJMhVEbyklurrBK23r8pTf6ajLN9tueSj5gVpiNQ=="
    )

    GoogleStorage = get_driver(Provider.GOOGLE_STORAGE)
    driver_google_storage = GoogleStorage(
        "GOOGULXCXRFPGQNEFPTE", "ys9om0uf2dYlXov4NOjO8jzGXLdtR7pwv9/nIK1V")

    S3_US_WEST = get_driver(Provider.S3_US_WEST)
    driver_s3_us_west = S3_US_WEST("AKIAITLX6IDDU5VTNAPA",
                                   "Pi0BhJiVan/l6a2+Yg9JVxrNvZSTRMGIx39XWAGq")

    NineFold = get_driver(Provider.NINEFOLD)
    driver_ninefold = NineFold(
        "f9946e04515a46cf98a998f2cb34dd3b/mlibcloud_1328774465274",
        "fRRs33RyQOmVOrB38UNqV+R3uAM=")

    S3_AP_SOUTHEAST = get_driver(Provider.S3_AP_SOUTHEAST)
    driver_s3_ap_southeast = S3_AP_SOUTHEAST(
        "AKIAITLX6IDDU5VTNAPA", "Pi0BhJiVan/l6a2+Yg9JVxrNvZSTRMGIx39XWAGq")

    S3_AP_NORTHEAST = get_driver(Provider.S3_AP_NORTHEAST)
    # BUG FIX: this driver was previously assigned to
    # driver_s3_ap_southeast, silently clobbering the driver built above.
    driver_s3_ap_northeast = S3_AP_NORTHEAST(
        "AKIAITLX6IDDU5VTNAPA", "Pi0BhJiVan/l6a2+Yg9JVxrNvZSTRMGIx39XWAGq")

    Cloudfiles_UK = get_driver(Provider.CLOUDFILES_UK)
    driver_cloudfiles_uk = Cloudfiles_UK("mlibcloud0",
                                         "d544e3b4a183ba4d07777be6e6ce0b77")

    Cloudfiles_US = get_driver(Provider.CLOUDFILES_US)
    driver_cloudfiles_us = Cloudfiles_US("mlibcloud",
                                         "5140858194409ed2dd2ec13e008ac754")

    driver = GroupDriver([
        driver_ali, driver_azure_us, driver_google_storage, driver_s3_us_west,
        driver_cloudfiles_uk
    ])
    # 3-of-5 share coding, 512-byte blocks.
    driver.set_original_share(3)
    driver.set_total_share(5)
    driver.set_block_size(512)
    container_name = "mlibcloud35"
    object_name = 'Beijing_1_16384'
    # Renamed from `object` to avoid shadowing the builtin.
    target = driver.get_object(container_name, object_name)
    driver.delete_object(target)
def authorize_google():
    """Build a Google Cloud Storage driver from environment credentials
    and return it together with the default bucket container."""
    driver_cls = get_driver(Provider.GOOGLE_STORAGE)
    storage = driver_cls(
        os.environ.get('GOOGLE_CLIENT_EMAIL'),
        os.environ.get('GOOGLE_STORAGE_KEY_FILE'),
    )
    bucket = storage.get_container(os.environ.get('DEFAULT_BUCKET_NAME'))
    return storage, bucket
Beispiel #24
0
 def __init__(self):
     """Instantiate the libcloud driver named by self.driver_name using
     the keyword options in self.driver_options; no container is bound
     yet."""
     provider = getattr(Provider, self.driver_name)
     driver_cls = get_driver(provider)
     self.driver = driver_cls(**self.driver_options)
     self._container = None
Beispiel #25
0
 def connect_storage(self):
     """Build a storage driver using credentials read from the AWS
     credentials file and profile named in self.config."""
     parser = configparser.ConfigParser(interpolation=None)
     key_path = os.path.expanduser(self.config.key_file)
     with io.open(key_path, 'r', encoding='utf-8') as aws_file:
         parser.read_file(aws_file)
     profile = parser[self.config.api_profile]
     driver_cls = get_driver(self.config.storage_provider)
     return driver_cls(profile['aws_access_key_id'],
                       profile['aws_secret_access_key'])
Beispiel #26
0
 def __init__(self, account_name, access_key, container='cmdata'):
     '''
     Initialize Azure storage driver.
     Todo: create container if it doesn't exist.
     '''
     azure_cls = get_driver(Provider.AZURE_BLOBS)
     self._driver = azure_cls(key=account_name, secret=access_key)
     self._container = self._driver.get_container(container_name=container)
     # Force https in the container URL returned by the service.
     url = self._container.extra.get('url')
     self._container_url = url.replace('http', 'https')
 def __init__(self):
     """Build the provider driver from self.driver_name/driver_options
     and fetch the working container named by self.container_name."""
     provider = getattr(Provider, self.driver_name)
     self.driver = get_driver(provider)(**self.driver_options)
     self.container = self.driver.get_container(
         container_name=self.container_name
     )
    def connect_storage(self):
        """
        Connects to AWS storage, resolving credentials from (in order)
        environment variables, an IAM instance-profile role, or the AWS
        credentials file named in self.config.

        :return driver: libcloud storage driver object
        :raises NotImplementedError: when no credential source is available
        """
        aws_security_token = ''
        aws_instance_profile = self.get_aws_instance_profile()

        # Authentication via environment variables
        if 'AWS_ACCESS_KEY_ID' in os.environ and \
                'AWS_SECRET_ACCESS_KEY' in os.environ:
            logging.debug("Reading AWS credentials from Environment Variables:")
            aws_access_key_id = os.environ['AWS_ACCESS_KEY_ID']
            aws_secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']

            # Access token for credentials fetched from STS service:
            if 'AWS_SECURITY_TOKEN' in os.environ:
                aws_security_token = os.environ['AWS_SECURITY_TOKEN']

        # or authentication via IAM Role credentials
        elif aws_instance_profile:
            logging.debug('Reading AWS credentials from IAM Role: %s', aws_instance_profile.text)
            # EC2 instance metadata endpoint for role credentials.
            url = "http://169.254.169.254/latest/meta-data/iam/security-credentials/" + aws_instance_profile.text
            try:
                auth_data = requests.get(url).json()
            except requests.exceptions.RequestException:
                logging.error('Can\'t fetch AWS IAM Role credentials.')
                sys.exit(1)

            aws_access_key_id = auth_data['AccessKeyId']
            aws_secret_access_key = auth_data['SecretAccessKey']
            aws_security_token = auth_data['Token']

        # or authentication via AWS credentials file
        elif self.config.key_file and os.path.exists(os.path.expanduser(self.config.key_file)):
            logging.debug("Reading AWS credentials from {}".format(
                self.config.key_file
            ))

            aws_config = configparser.ConfigParser(interpolation=None)
            with io.open(os.path.expanduser(self.config.key_file), 'r', encoding='utf-8') as aws_file:
                aws_config.read_file(aws_file)
                aws_profile = self.config.api_profile
                profile = aws_config[aws_profile]
                aws_access_key_id = profile['aws_access_key_id']
                aws_secret_access_key = profile['aws_secret_access_key']
        else:
            raise NotImplementedError("No valid method of AWS authentication provided.")

        # token is '' unless STS/IAM provided one above.
        cls = get_driver(self.config.storage_provider)
        driver = cls(
            aws_access_key_id, aws_secret_access_key, token=aws_security_token
        )
        return driver
Beispiel #29
0
def libcloud_local(fs):
    """Pytest fixture: yield a libcloud LOCAL storage driver rooted at
    /location inside the fake filesystem, then delete every container it
    created."""
    root = '/location'
    Path(root).mkdir(parents=True)
    driver = get_driver(Provider.LOCAL)(root)
    print(f'fixture: tmp dir before yield: {tempfile.gettempdir()} - exists:  {Path(tempfile.gettempdir()).exists()}')
    yield driver
    print(f'fixture: tmp dir after yield: {tempfile.gettempdir()} - exists:  {Path(tempfile.gettempdir()).exists()}')
    # cleanup containers
    for container in driver.iterate_containers():
        driver.delete_container(container)
Beispiel #30
0
    def cloudy(self):
        """List the S3 containers reachable with credentials taken from
        the AWS environment variables and print their names."""
        access_id = os.getenv('AWS_ACCESS_KEY_ID')
        secret_key = os.getenv('AWS_SECRET_ACCESS_KEY')

        conn = get_driver(Provider.S3)(access_id, secret_key)

        containers = conn.list_containers()
        print('%d containers:' % len(containers))
        for container in containers:
            print(' - %s' % container.name)
    def cloudy(self):
        """Print a count and listing of all S3 containers visible to the
        AWS credentials in the process environment."""
        driver_cls = get_driver(Provider.S3)
        conn = driver_cls(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY'),
        )

        names = [c.name for c in conn.list_containers()]
        print('%d containers:' % len(names))
        for name in names:
            print(' - %s' % name)
def _get_driver(profile):
    """Build a libcloud storage driver from the salt ``libcloud_storage``
    config profile: every option except ``driver`` is forwarded, with
    key/secret/secure/host/port normalised to defaults."""
    config = __salt__["config.option"]("libcloud_storage")[profile]
    cls = get_driver(config["driver"])
    args = dict(config)
    args.pop("driver")
    for name, default in (("key", None), ("secret", None),
                          ("secure", True), ("host", None), ("port", None)):
        args[name] = config.get(name, default)
    return cls(**args)
Beispiel #33
0
def _get_driver(profile):
    """Resolve a storage driver for the named salt profile, forwarding
    all profile options (minus 'driver') as driver kwargs with the usual
    key/secret/secure/host/port defaults."""
    config = __salt__['config.option']('libcloud_storage')[profile]
    args = {k: v for k, v in config.items() if k != 'driver'}
    args['key'] = config.get('key')
    args['secret'] = config.get('secret', None)
    args['secure'] = config.get('secure', True)
    args['host'] = config.get('host', None)
    args['port'] = config.get('port', None)
    cls = get_driver(config['driver'])
    return cls(**args)
def authorize_aws_s3():
    """Connect to S3 using environment credentials and return the
    driver, the default bucket container, and the upload extras."""
    driver_cls = get_driver(Provider.S3)
    storage = driver_cls(
        os.environ.get('AWS_ACCESS_KEY_ID'),
        os.environ.get('AWS_SECRET_ACCESS_KEY'),
        region=os.environ.get('AWS_DEFAULT_REGION'),
    )

    bucket = storage.get_container(
        container_name=os.environ.get('AWS_BUCKET'))
    extra = {'content_type': 'application/octet-stream'}

    return storage, bucket, extra
Beispiel #35
0
def main():
    """Demo of GroupDriver erasure-coded upload/download across Google
    Storage, S3 and Aliyun backends, toggled by the flags below."""
    key_file = 'mlibcloud_keys'
    key_dict = read_keys_from_file(key_file)
    Google = get_driver(Provider.GOOGLE_STORAGE)
    S3 = get_driver(Provider.S3_US_WEST)
    Aliyun = get_driver(Provider.ALIYUN_STORAGE)

    driver_cf = Google(key_dict['GOOGLE_STORAGE'][0],
                       key_dict['GOOGLE_STORAGE'][1])
    driver_s3 = S3(key_dict['S3_US_WEST'][0], key_dict['S3_US_WEST'][1])
    driver_ali = Aliyun(key_dict['ALIYUN_STORAGE'][0],
                        key_dict['ALIYUN_STORAGE'][1])

    driver = GroupDriver([driver_cf, driver_s3, driver_ali])

    # NOTE(review): 'delete' is set but never acted on below.
    upload = True
    download = False
    delete = False

    if upload:
        # upload: 2-of-3 share split with 512-byte blocks
        driver.set_original_share(2)
        driver.set_total_share(3)
        driver.set_block_size(512)

        container_name = 'fuckyou-mlibcloud'
        file_path = '/home/pin/debug/fuckyou'
        obj_name = 'fuckyou'

        driver.create_container(container_name)
        containers = driver.get_container(container_name)
        driver.upload_object(file_path, containers, obj_name)

    if download:
        # download the object back into the debug directory
        dest_path = '/home/pin/debug/'
        container_name = 'fuckyou-mlibcloud'
        obj_name = 'fuckyou'

        mobj = driver.get_object(container_name, obj_name)
        driver.download_object(mobj, dest_path)
Beispiel #36
0
def connect(options):
    """Instantiate a libcloud storage driver from an options mapping.

    Strips the tool's own options first, since some drivers reject
    unknown keyword arguments.
    """
    internal_opts = ('buckets_exclude', 'accurate', 'nb_prefetcher',
                     'prefetch_size', 'queue_size', 'provider',
                     'buckets_include', 'debug')
    driver_opt = {key: value for key, value in dict(options).items()
                  if key not in internal_opts}

    provider = getattr(Provider, options['provider'])
    return get_driver(provider)(**driver_opt)
Beispiel #37
0
    def __init__(self):
        """Select a storage backend from the OBJECTSTORE env variable.

        Supported values: 'gcs', 'abs', 'minio', 'local'.  's3' is
        recognised but not implemented yet; anything else is rejected.

        Raises:
            NotImplementedError: for 's3' or an unknown provider name.
        """
        # Should MD5 be used for checksums. Can be disabled by drivers as
        # it is often broken on gateways (like minio).
        self.verify_hash = True

        driver = os.getenv('OBJECTSTORE')
        if driver == 'gcs':
            cls = get_driver(Provider.GOOGLE_STORAGE)
            self.driver = cls(os.getenv('GOOGLE_ACCOUNT'),
                              os.getenv('AUTH_TOKEN'),
                              os.getenv('GOOGLE_PROJECT'))

        elif driver == 'abs':
            cls = get_driver(Provider.AZURE_BLOBS)
            self.driver = cls(key=os.getenv('AZURE_ACCOUNT'),
                              secret=os.getenv('AZURE_KEY'))

        elif driver == 'minio':
            # Minio storage gateway. More or less S3, but with disabled MD5 checking.
            # TLS stays on unless MINIO_TLS is exactly 'FALSE'.
            cls = get_driver(Provider.S3)
            sec = os.getenv('MINIO_TLS') != 'FALSE'
            self.verify_hash = False
            self.driver = cls(key=os.getenv('MINIO_KEY'),
                              secret=os.getenv('MINIO_SECRET'),
                              host=os.getenv('MINIO_HOST', 'localhost'),
                              port=int(os.getenv('MINIO_PORT', '9000')),
                              secure=sec
                              )
        elif driver == 's3':
            # connect to AWS here.
            # Fix: removed the dead assignment `self.driver = driver`
            # (it stored the string 's3' right before raising).
            raise NotImplementedError('S3 storage not yet supported')
        elif driver == 'local':
            cls = get_driver(Provider.LOCAL)
            self.driver = cls(os.getenv('LOCAL_FOLDER'))
        else:
            raise NotImplementedError('Unknown storage provider')
Beispiel #38
0
 def connect_storage(self):
     """Load the OSS profile from the configured key file and build a driver.

     Reads ``self.config.key_file`` (ini format), selects the section named
     by ``self.config.api_profile`` and instantiates the storage driver for
     ``self.config.storage_provider`` from its credentials.
     """
     parser = configparser.ConfigParser(interpolation=None)
     key_path = os.path.expanduser(self.config.key_file)
     with io.open(key_path, 'r', encoding='utf-8') as key_file:
         parser.read_file(key_file)
         profile = parser[self.config.api_profile]
         driver_cls = get_driver(self.config.storage_provider)
         return driver_cls(profile['endpoint'],
                           profile['accessKeyID'],
                           profile['accessKeySecret'])
Beispiel #39
0
    def __init__(self):
        """Pick a storage driver based on the OBJECTSTORE env variable."""
        driver = os.getenv('OBJECTSTORE')
        if (driver == 'gcs'):
            cls = get_driver(Provider.GOOGLE_STORAGE)
            # Credentials come from the environment; project is hard-coded.
            self.driver = cls(os.getenv('GOOGLE_ACCOUNT'),
                              os.getenv('AUTH_TOKEN'),
                              project='mottak2')

        elif (driver == 's3'):
            # connect to AWS here.
            # NOTE(review): placeholder — this stores the literal string
            # 's3' in self.driver instead of a driver instance; confirm
            # the branch is intentionally unimplemented.
            self.driver = driver
        else:
            raise Exception('Unknown storage provider')
Beispiel #40
0
    def _create_s3_cmd(self):
        """Assemble the base aws-cli invocation (endpoint and region flags)."""
        cmd = [self._aws_cli_path]

        endpoint = self.endpoint_url
        if endpoint is not None:
            cmd += ["--endpoint-url", endpoint]

        region = self._config.region
        if region is not None and region != "default":
            cmd += ["--region", region]
        elif region == "default" and self._config.storage_provider not in (Provider.S3, "s3_compatible"):
            # Legacy libcloud S3 providers that were tied to a specific region such as s3_us_west_oregon
            cmd += ["--region", get_driver(self._config.storage_provider).region_name]

        return cmd
    def setup(self):
        """Mount the test filesystem and open the libcloud storage handle.

        Exits the process when the mount command fails.  Relies on the
        module-level ``driver``, ``access_id`` and ``secret`` names being
        defined elsewhere in this file.
        """
        print "mounting test filesystem"
        args = shlex.split(self.MOUNT_CMD)
        p = subprocess.Popen(args)
        p.wait()

        # Non-zero return code means the mount failed; bail out hard.
        if 0 != p.returncode:
            print >>sys.stderr, "failed to mount filesystem"
            sys.exit(1)

        print "setting up libcloud storage connection"

        self.storage_handle = get_driver(getattr(Provider, driver))(access_id, secret)
        print self.storage_handle
Beispiel #42
0
def create_destination(args):
    """Ensure the destination container exists; return the source container.

    Depending on ``args.from_service`` ('S3' or 'CF'), the container is
    created on the *opposite* service and the container copied *from* is
    returned.
    """
    rackspace_cls = get_driver(Provider.CLOUDFILES_US)
    rackspace = rackspace_cls(args.cf_username, args.cf_password,
                              ex_force_service_region=args.cf_region)
    amazon_cls = get_driver(Provider.S3)
    amazon = amazon_cls(args.s3_access_id, args.s3_access_key)

    if args.from_service == 'S3':
        try:
            rackspace.create_container(args.cf_container)
        except libcloud.storage.types.ContainerAlreadyExistsError:
            pass  # destination already present — fine
        except Exception as e:
            raise SystemExit(e)
        return amazon.get_container(args.s3_container)
    elif args.from_service == 'CF':
        try:
            amazon.create_container(args.s3_container)
        except libcloud.storage.types.ContainerAlreadyExistsError:
            pass  # destination already present — fine
        except Exception as e:
            raise SystemExit(e)
        return rackspace.get_container(args.cf_container)
 def __init__(self, provider_name, option=None):
     """Create a libcloud driver from settings.LIBCLOUD_PROVIDERS[provider_name].

     Raises:
         ImproperlyConfigured: when the provider entry is missing or the
             driver cannot be instantiated.
     """
     self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
     if not self.provider:
         raise ImproperlyConfigured(
             'LIBCLOUD_PROVIDERS %s not defined or invalid' % provider_name)
     try:
         Driver = get_driver(self.provider['type'])
         self.driver = Driver(
             self.provider['user'],
             self.provider['key'],
             )
     except Exception as e:
         # Bug fix: the original interpolated a 2-tuple into a single
         # '%s', which raised TypeError at this raise site and masked
         # the real error.  Also modernised 'except Exception, e'.
         raise ImproperlyConfigured(
             "Unable to create libcloud driver type %s: %s" %
             (self.provider.get('type'), e))
Beispiel #44
0
    def __init__(self, provider_name=None, option=None):
        """Build a libcloud storage driver from Django settings.

        Falls back to settings.DEFAULT_LIBCLOUD_PROVIDER (or "default")
        when no provider name is given.

        Raises:
            ImproperlyConfigured: missing provider entry, or any failure
                while instantiating the driver.
        """
        if provider_name is None:
            provider_name = getattr(settings, "DEFAULT_LIBCLOUD_PROVIDER", "default")

        self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
        if not self.provider:
            raise ImproperlyConfigured("LIBCLOUD_PROVIDERS %s not defined or invalid" % provider_name)
        try:
            provider_type = self.provider["type"]
            if isinstance(provider_type, basestring):
                # Accept the dotted-string form, but only from the known
                # libcloud Provider namespace — guards against arbitrary
                # attribute lookup driven by settings.
                module_path, tag = provider_type.rsplit(".", 1)
                if module_path != "libcloud.storage.types.Provider":
                    raise ValueError("Invalid module path")
                provider_type = getattr(Provider, tag)

            Driver = get_driver(provider_type)
            self.driver = Driver(self.provider["user"], self.provider["key"])
        except Exception, e:
            raise ImproperlyConfigured("Unable to create libcloud driver type %s: %s" % (self.provider.get("type"), e))
Beispiel #45
0
def get_driver_class(provider):
    """
    Return the driver class
    :param provider: str - provider name, either a dotted path
        ('pkg.module.ClassName') or a Provider attribute name
        (case-insensitive)
    :return:
    """
    if "." in provider:
        path, _, kls = provider.rpartition('.')
        module = import_module(path)
        try:
            driver = getattr(module, kls)
        except AttributeError:
            raise ImportError('{0} provider not found at {1}'.format(
                kls,
                path))
    else:
        driver = getattr(Provider, provider.upper())
    return get_driver(driver)
Beispiel #46
0
def test_cloud_master_key_store_s3(request, tmpdir):
    """Round-trip an RSA master key through CloudMasterKeyStore on real S3.

    Skipped unless --aws-access-key/--aws-secret-key/--aws-s3-bucket are
    all supplied on the pytest command line.
    """
    try:
        access_key = request.config.getoption('--aws-access-key')
        secret_key = request.config.getoption('--aws-secret-key')
        bucket_name = request.config.getoption('--aws-s3-bucket')
    except ValueError:
        # Options not registered in this pytest invocation.
        access_key = secret_key = bucket_name = None
    if access_key is None or secret_key is None or bucket_name is None:
        skip(
            '--aws-access-key/--aws-secret-key/--aws-s3-bucket are not '
            'provided; skipped'
        )
    driver_cls = get_driver(Provider.S3)
    driver = driver_cls(access_key, secret_key)
    container = driver.get_container(container_name=bucket_name)
    # Random hex object name so concurrent runs don't collide.
    tmpname = ''.join(map('{:02x}'.format, os.urandom(16)))
    s = CloudMasterKeyStore(driver, container, tmpname)
    key = RSAKey.generate(1024)
    # load() -- when not exists
    with raises(EmptyStoreError):
        s.load()
    try:
        # save()
        s.save(key)
        obj = driver.get_object(container.name, tmpname)
        dest = tmpdir / tmpname
        obj.download(str(dest))
        saved = read_private_key_file(dest.open())
        assert isinstance(saved, RSAKey)
        assert saved.get_base64() == key.get_base64()
        # load() -- when exists
        loaded = s.load()
        assert isinstance(loaded, RSAKey)
        assert loaded.get_base64() == key.get_base64()
    finally:
        # Best-effort cleanup of the uploaded key object.
        try:
            o = driver.get_object(container.name, tmpname)
        except ObjectDoesNotExistError:
            pass
        else:
            o.delete()
Beispiel #47
0
    def backup(self, dest_provider, dest_type, host):
        """Tar+gzip the configured source paths and stream them to cloud storage.

        :param dest_provider: name of a libcloud storage Provider attribute
        :param dest_type: unused here — TODO confirm callers need it
        :param host: host being backed up (only printed)
        """
        import subprocess
        from datetime import datetime

        from libcloud.storage.types import Provider, ContainerDoesNotExistError
        from libcloud.storage.providers import get_driver
        import libcloud.security

        # TODO Make this optional
        libcloud.security.VERIFY_SSL_CERT = False

        print host

        driver = get_driver(getattr(Provider, dest_provider))(self.config.destination_key,
                                  self.config.destination_secret)

        directory = expand_paths([self.config.source_name])
        cmd = 'tar cvzpf - %s' % (' '.join(directory))

        # Object name embeds the date, e.g. prefix-2024-01-31.tar.gz.
        object_name = '%s-%s.tar.gz' % (self.config.destination_prefix, datetime.now().strftime('%Y-%m-%d'))
        container_name = self.config.destination_container

        # Create a container if it doesn't already exist
        try:
            container = driver.get_container(container_name=container_name)
        except ContainerDoesNotExistError:
            container = driver.create_container(container_name=container_name)

        pipe = subprocess.Popen(cmd, bufsize=0, shell=True, stdout=subprocess.PIPE)
        return_code = pipe.poll()

        print 'Uploading object...'

        # Stream tar's stdout straight to the provider; poll() stays None
        # until tar exits, and upload_object_via_stream consumes the pipe.
        while return_code is None:
            # Compress data in our directory and stream it directly to CF
            obj = container.upload_object_via_stream(iterator=pipe.stdout,
                                                    object_name=object_name)
            return_code = pipe.poll()

        print 'Upload complete, transferred: %s KB' % ((obj.size / 1024))
Beispiel #48
0
    def __init__(self, **options):
        """ Build a Cloud Provider and bind it to its storage container. """
        super(DefaultCloudProvider, self).__init__(**options)
        try:
            driver_cls = get_driver(self._name)
            self._driver = driver_cls(options['id'], options['key'])
            self._container = self._driver.get_container(self._container_name)
            logger.info(
                "Connected to %s container from %s provider.",
                self._container_name, self._name)

        except AttributeError:
            # get_driver failed to resolve the provider name.
            msg = "Provider %s doesn't exists in detours."
            self._handle_error(BadProviderError, msg, self._name)

        except ContainerDoesNotExistError:
            msg = "Container %s does not exists in %s provider."
            self._handle_error(
                BadContainerError, msg, self._container_name, self._name)

        except Exception:
            # Catch-all boundary: any other failure is fatal.
            msg = "Fatal error while creating %s provider."
            self._handle_error(CloudDetoursError, msg, self._name)
Beispiel #49
0
from __future__ import print_function
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# Keystone v2 credentials for the Swift endpoint (placeholders).
auth_username = '******'
auth_password = '******'
auth_url = 'http://controller:5000'
project_name = 'your_project_name_or_id'
region_name = 'your_region_name'

driver_cls = get_driver(Provider.OPENSTACK_SWIFT)
swift = driver_cls(auth_username,
                   auth_password,
                   ex_force_auth_url=auth_url,
                   ex_force_auth_version='2.0_password',
                   ex_tenant_name=project_name,
                   ex_force_service_region=region_name)

# Create a container and show what exists on the account.
container_name = 'fractals'
print(swift.create_container(container_name=container_name))

print(swift.list_containers())

# Upload one file into the container we just created.
file_path = 'goat.jpg'
object_name = 'an amazing goat'
container = swift.get_container(container_name=container_name)
uploaded = container.upload_object(file_path=file_path, object_name=object_name)

print(container.list_objects())
Beispiel #50
0
 def _get_driver(cls, name):
     """Wrapper around libcloud's get_driver — kept as an override/test seam."""
     driver = get_driver(name)
     return driver
#!/usr/bin/env python
# Code samples for blog post
# http://mikethetechie.com/post/6975966936/controlling-the-environment-cloud-control-apis
#
# Note - I couldn't get this to work. For more info, see http://mikethetechie.com/private/6969971577/tumblr_lng0mzd5bg1qf6p57
import secrets # import user name RACKSPACE_USER and API key RACKSPACE_KEY

from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
from pprint import pprint




# Authenticate against the London auth endpoint explicitly.
rackspace_cls = get_driver(Provider.CLOUDFILES_UK)
conn2 = rackspace_cls(secrets.RACKSPACE_USER, secrets.RACKSPACE_KEY,
                      'https://lon.auth.api.rackspacecloud.com/v1.0')

# Show what already exists on the account.
containers = conn2.list_containers()
container_objects = conn2.list_container_objects(containers[0])

pprint(containers)
pprint(container_objects)

# Create a container, then an HTML object inside it.
cont = conn2.create_container('Testlibcloudcontainer')
obj = cont.create_object('t.html')
obj.content_type = 'text/html'

#Finally for some data.
obj.write('<html><head></head><body><img src="xd_logo.jpg"><br />Hello world!</body></html>')
Beispiel #52
0
def run():
    """Command-line entry point for the file syncer.

    Parses options, validates the provider and log level, then builds a
    FileSyncer and either restores from or syncs to the remote container.

    Raises:
        ValueError: missing required option, unsupported provider, or an
            invalid log level.
    """
    usage = 'usage: %prog --username=<api username> --key=<api key> [options]'
    parser = OptionParser(usage=usage)
    parser.add_option('--provider', dest='provider', default='CLOUDFILES_US',
                      help='Provider to use')
    parser.add_option('--region', dest='region', default=None,
                      help='Region to use if a Libcloud driver supports \
                        multiple regions (e.g. ORD for CloudFiles provider)')
    parser.add_option('--username', dest='api_username',
                      help='API username')
    parser.add_option('--key', dest='api_key',
                      help='API key')
    parser.add_option('--restore', dest='restore', action="store_true",
                      help='Restore from')
    parser.add_option('--container-name', dest='container_name',
                      default='file_syncer',
                      help='Name of the container storing the files')
    parser.add_option('--directory', dest='directory',
                      help='Local directory to sync')
    parser.add_option('--cache-path', dest='cache_path',
                      default=os.path.expanduser('~/.file_syncer'),
                      help='Directory where a settings and cached manifest ' +
                           'files are stored')
    parser.add_option('--concurrency', dest='concurrency', default=10,
                      help='File upload concurrency')
    parser.add_option('--exclude', dest='exclude',
                      help='Comma separated list of file name patterns to ' +
                           'exclude')
    parser.add_option('--log-level', dest='log_level', default='INFO',
                      help='Log level')
    parser.add_option('--delete', dest='delete', action='store_true',
                      help='delete extraneous files from dest containers',
                      default=False)
    parser.add_option('--auto-content-type', dest='auto_content_type',
                      default=False, action='store_true',
                      help='Don\'t automatically specify \'application/' +
                           'octet-stream\' content-type for every file. Use ' +
                           'automatic file type detection based on the file ' +
                           'extension')
    parser.add_option('--ignore-symlinks', dest='ignore_symlinks',
                      default=False, action='store_true',
                      help='Don\'t visit directories pointed to by ' +
                      'symlinks, on systems that support them')

    (options, args) = parser.parse_args()

    # Presence check for every option listed in REQUIRED_OPTIONS.
    for option_name, key in REQUIRED_OPTIONS:
        if not getattr(options, key, None):
            raise ValueError('Missing required argument: ' + option_name)

    # Set up provider
    if options.provider not in SUPPORTED_PROVIDERS:
        raise ValueError('Invalid provider: %s. Valid providers are: %s' %
                         (options.provider, ', '.join(SUPPORTED_PROVIDERS)))

    provider = PROVIDER_MAP[options.provider]

    # Set up logger
    log_level = options.log_level.upper()

    if log_level not in VALID_LOG_LEVELS:
        valid_levels = [value.lower() for value in VALID_LOG_LEVELS]
        raise ValueError('Invalid log level: %s. Valid log levels are: %s' %
                         (options.log_level, ', ' .join(valid_levels)))

    # NOTE(review): the getattr fallback is the *string* 'INFO', not
    # logging.INFO — unreachable while VALID_LOG_LEVELS only contains real
    # level names, but worth confirming.
    level = getattr(logging, log_level, 'INFO')
    logger = get_logger(handler=logging.StreamHandler(), level=level)

    directory = os.path.expanduser(options.directory)
    # NOTE(review): an empty --exclude yields [''] — confirm FileSyncer
    # treats the empty pattern as a no-op.
    exclude_patterns = options.exclude or ''
    exclude_patterns = exclude_patterns.split(',')

    syncer = FileSyncer(directory=directory,
                        provider_cls=get_driver(provider),
                        provider=provider,
                        region=options.region,
                        username=options.api_username,
                        api_key=options.api_key,
                        container_name=options.container_name,
                        cache_path=options.cache_path,
                        exclude_patterns=exclude_patterns,
                        logger=logger,
                        concurrency=int(options.concurrency),
                        auto_content_type=options.auto_content_type,
                        ignore_symlinks=options.ignore_symlinks)
    if options.restore:
        syncer.restore()
    else:
        syncer.sync(options.delete)
Beispiel #53
0
 def _azure_client(self) -> Container:
     """Return the configured blob container via a freshly built driver."""
     connection = get_driver(self._provider)(self._account, self._key)
     return connection.get_container(self._container)
Beispiel #54
0
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# Backblaze B2 credentials (placeholders).
account_id = 'XXXXXX'
application_key = 'YYYYYY'

driver = get_driver(Provider.BACKBLAZE_B2)(account_id, application_key)
Beispiel #55
0
    rootstring=''
    datestamp=''
    container_name = ('')
# Set up logging file
# The log file name embeds the timestamp computed earlier (loggydatestamp).
logfilename = loggydatestamp + '-Shoveller-Cloud' + '.log'
print 'Logging to ' + logfilename
logging.basicConfig(filename=logfilename,filemode='w',level=logging.INFO,format='%(asctime)s %(message)s')
initialloggystring = 'New scan started.' + loggydatestamp
print initialloggystring
logging.info(initialloggystring)
# Running error counter — TODO confirm which later code consumes it.
errorcount = 0

print '\nLogging in...'
#Security Block -- Logging in with our certificates
# NOTE(review): SSL verification is disabled globally here; confirm this
# is acceptable for the deployment.
libcloud.security.VERIFY_SSL_CERT = False
Ninefold = get_driver(Provider.NINEFOLD)
driver = Ninefold(access_token, shared_secret)
# This plays out as driver = Ninefold('YOUR Atmos Access Token HERE', 'YOUR Atmos Shared Secret HERE')
#Functions for printing the list of files and folders in cloud storage
def showcloudassets():
    try:
        containers = driver.list_containers()
        print '\nList of Containers\n'
        pprint(containers)
        print '\n'
    except:
        print "*** Error occurred: ", sys.exc_info()[0] , " ***"
        print 'Exiting...'
        sys.exit(1)	
Beispiel #56
0
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pprint import pprint

from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# Instantiate the Rackspace UK Cloud Files driver with API credentials.
driver = get_driver(Provider.CLOUDFILES_UK)('access key id', 'secret key')

# List the containers and the objects of the first one.
containers = driver.list_containers()
container_objects = driver.list_container_objects(containers[0])

pprint(containers)
pprint(container_objects)
Beispiel #57
0
 def make_driver(self):
     """Instantiate the configured libcloud driver for this provider and region."""
     driver_cls = get_driver(getattr(Provider, self.provider_name))
     return driver_cls(self.username, self.secret, region=self.region)
Beispiel #58
0
 def __init__(self):
     """Connect the backend: resolve the driver class and open the container."""
     super(LibCloudBackend, self).__init__()
     driver_cls = get_driver(LIBCLOUD_DRIVER)
     self.driver_cls = driver_cls
     self.driver = driver_cls(*CREDENTIALS)
     self.container = self.driver.get_container(CONTAINER_NAME)
Beispiel #59
0
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# AuroraObjects (S3-compatible) credentials — placeholders.
access_key = 'XXXXXX'
secret_key = 'YYYYYY'

driver = get_driver(Provider.AURORAOBJECTS)(access_key, secret_key)