def put_s3(i, args):
    """Worker greenlet: drain the shared `queue` of CloudFiles objects and
    stream each one into the target S3 container.

    i -- worker index, used only to prefix log lines.
    args -- parsed CLI options carrying CF/S3 credentials, region, and
        container names.

    Exits by raising gevent.GreenletExit once the queue is empty, so the
    trailing 'Complete' print is effectively unreachable.
    """
    print 'Thread %4d: Start' % i
    # Each worker opens its own CF and S3 connections rather than sharing
    # one across greenlets.
    cf_driver = get_driver(Provider.CLOUDFILES_US)
    cf = cf_driver(args.cf_username, args.cf_password,
                   ex_force_service_region=args.cf_region)
    cf_cont = cf.get_container(args.cf_container)
    s3_driver = get_driver(Provider.S3)
    s3 = s3_driver(args.s3_access_id, args.s3_access_key)
    s3_cont = s3.get_container(args.s3_container)
    while 1:
        try:
            obj = queue.get_nowait()
        except gevent.queue.Empty:
            print 'Thread %4d: Queue empty' % i
            # Conventional way to terminate a greenlet from inside.
            raise gevent.GreenletExit
        else:
            # Re-home the queued object onto this worker's own CF
            # connection before streaming it out.
            obj.driver = cf
            obj.container = cf_cont
            print 'Thread %4d: Upload %s' % (i, obj.name)
            obj_stream = obj.as_stream()
            s3_cont.upload_object_via_stream(obj_stream, obj.name,
                                             extra=obj.extra)
            print 'Thread %4d: Upload complete %s' % (i, obj.name)
    print 'Thread %4d: Complete' % i
def get_driver_helper(provider_name):
    """Map a lowercase provider name to a libcloud storage driver class.

    provider_name -- 'cloudfiles' or 's3'; any other value prints an
        error and exits the process with status 1.
    """
    if provider_name == 'cloudfiles':
        return get_driver(Provider.CLOUDFILES_US)
    elif provider_name == 's3':
        return get_driver(Provider.S3)
    else:
        print 'Unknown provider %s!' % provider_name
        sys.exit(1)
def html(revision_id, path="index.html"):
    """Serve a stored HTML asset for a given revision.

    Looks up "html/<revision_id>/<path>" in the configured CloudFiles
    bucket and returns its bytes with the object's extra metadata mapped
    to response headers (underscores translated to dashes).

    revision_id -- revision identifier embedded in the object key.
    path -- object path within the revision (default "index.html").

    Aborts with 404 when the object does not exist.
    """
    container = Container(
        app.config["BUCKET"],
        None,
        get_driver(Provider.CLOUDFILES)(
            app.config["RACKSPACE_USER"],
            app.config["RACKSPACE_APIKEY"],
            region=app.config["RACKSPACE_REGION"],
        ),
    )

    # See if the requested file exists
    try:
        obj = container.get_object(
            "html/{revision}/{path}".format(revision=revision_id, path=path)
        )
    except ObjectDoesNotExistError:
        abort(404)

    # Assemble the streamed chunks with join; the original `data += chunk`
    # loop is quadratic on large objects because bytes are immutable.
    data = b"".join(obj.as_stream())

    return data, 200, {k.replace("_", "-"): v for k, v in obj.extra.items()}
def __init__(self, config=None):
    """Build the libcloud storage driver described by *config*.

    config -- mapping with keys 'type', 'user', 'key', 'bucket',
        'base_url' and optionally 'region'. A string 'type' must be a
        dotted name under libcloud.storage.types.Provider.

    Raises ImproperlyConfigured when the driver cannot be created.
    """
    self.provider = config
    driver_kwargs = {}
    if 'region' in self.provider:
        driver_kwargs['region'] = self.provider['region']
    try:
        ptype = self.provider['type']
        if isinstance(ptype, str):
            # Only dotted names rooted at the Provider enum are accepted.
            prefix, attr = ptype.rsplit('.', 1)
            if prefix != 'libcloud.storage.types.Provider':
                raise ValueError("Invalid module path")
            ptype = getattr(Provider, attr)
        driver_cls = get_driver(ptype)
        self.driver = driver_cls(
            self.provider['user'],
            self.provider['key'],
            **driver_kwargs
        )
    except Exception as e:
        raise ImproperlyConfigured(
            "Unable to create libcloud driver type %s: %s" %
            (self.provider.get('type'), e))
    self.bucket = self.provider['bucket']  # Limit to one container
    self.base_url = self.provider['base_url']
def __init__(self, provider_name, option=None):
    """Look up *provider_name* in settings.LIBCLOUD_PROVIDERS and build
    the matching libcloud storage driver.

    A 'google' provider without a 'user' entry is constructed with
    key/secret/project keyword arguments; every other provider with
    positional (user, key).

    provider_name -- key into settings.LIBCLOUD_PROVIDERS.
    option -- unused; kept for interface compatibility.

    Raises ImproperlyConfigured when the provider is unknown or the
    driver cannot be created.
    """
    self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
    if not self.provider:
        raise ImproperlyConfigured(
            'LIBCLOUD_PROVIDERS %s not define or invalid' % provider_name)
    try:
        Driver = get_driver(self.provider['type'])
        # BUG FIX: was `self.provide.get(...)` -- an AttributeError on
        # every call for non-google providers.
        user = self.provider.get('user', None)
        key = self.provider.get('key', None)
        if provider_name == 'google' and not user:
            self.driver = Driver(
                key,
                secret=self.provider['secret'],
                project=self.provider['project']
            )
        else:
            self.driver = Driver(
                user,
                key,
            )
    except Exception as e:
        # BUG FIX: the original '"...%s" % (type, e)' had one placeholder
        # for a two-tuple, raising TypeError instead of reporting the
        # real failure. Two placeholders, matching the sibling classes.
        raise ImproperlyConfigured(
            "Unable to create libcloud driver type %s: %s" %
            (self.provider.get('type'), e))
def __init__(self, provider_name=None, option=None):
    """Build a libcloud storage driver from settings.LIBCLOUD_PROVIDERS.

    provider_name -- key into LIBCLOUD_PROVIDERS; falls back to
        settings.DEFAULT_LIBCLOUD_PROVIDER, then to 'default'.
    option -- unused; kept for interface compatibility.

    Raises ImproperlyConfigured when the provider entry is missing or
    the driver cannot be constructed.
    """
    if provider_name is None:
        provider_name = getattr(settings, 'DEFAULT_LIBCLOUD_PROVIDER',
                                'default')
    self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
    if not self.provider:
        raise ImproperlyConfigured(
            'LIBCLOUD_PROVIDERS %s not defined or invalid' % provider_name)
    extra_kwargs = {}
    if 'region' in self.provider:
        extra_kwargs['region'] = self.provider['region']
    try:
        provider_type = self.provider['type']
        # NOTE: `basestring` makes this block Python 2 only.
        if isinstance(provider_type, basestring):
            # Only dotted names under the Provider enum are accepted,
            # e.g. 'libcloud.storage.types.Provider.S3'.
            module_path, tag = provider_type.rsplit('.', 1)
            if module_path != 'libcloud.storage.types.Provider':
                raise ValueError("Invalid module path")
            provider_type = getattr(Provider, tag)
        Driver = get_driver(provider_type)
        self.driver = Driver(
            self.provider['user'],
            self.provider['key'],
            **extra_kwargs
        )
    except Exception as e:
        raise ImproperlyConfigured(
            "Unable to create libcloud driver type %s: %s" % \
            (self.provider.get('type'), e))
    self.bucket = self.provider['bucket']  # Limit to one container
def _get_storage(uri):
    """
    Given a URI like local:///srv/repo or s3://key:secret/apt.example.com,
    return a libcloud storage or container object.

    The container is fetched if it exists, created otherwise.
    """
    driver = get_driver(uri.scheme)
    key = uri.username
    secret = uri.password
    container = uri.netloc
    if uri.scheme.startswith('s3'):
        # Credentials may come from the URI or the standard AWS env vars.
        if not key:
            key = os.environ.get('AWS_ACCESS_KEY_ID')
        if not secret:
            secret = os.environ.get('AWS_SECRET_ACCESS_KEY')
        if not (key and secret and container):
            raise ValueError('For S3 you must provide an access key ID, secret access key, and bucket name')
    elif uri.scheme == 'local':
        # For the local driver, the "key" is the parent directory and the
        # "container" is the final path component.
        parts = []
        if uri.netloc:
            parts.append(uri.netloc)
        if uri.path:
            parts.append(uri.path)
        if not parts:
            parts.append('.')
        base_path = os.path.abspath(''.join(parts))
        key = os.path.dirname(base_path)
        container = os.path.basename(base_path)
    storage = driver(key, secret)
    # Reuse the container when it already exists; otherwise create it.
    try:
        return storage.get_container(container)
    except ContainerDoesNotExistError:
        return storage.create_container(container)
def handle_noargs(self, **options):
    """Archive mixes to Azure blob storage.

    For each mix without an archive path (least played first) whose file
    is still on local disk: upload it to the Azure container, record the
    resulting archive URL on the model, then move the local file into
    MEDIA_ROOT/mixes/archived.

    Any exception aborts the whole run and is printed, not re-raised.
    """
    try:
        cls = get_driver(Provider.AZURE_BLOBS)
        driver = cls(settings.AZURE_ACCOUNT_NAME, settings.AZURE_ACCOUNT_KEY)
        container = driver.get_container(container_name=settings.AZURE_CONTAINER)
        # Mixes not yet archived, ordered by play count ascending.
        # .filter(upload_date__lte=datetime.today() - timedelta(days=180)) \
        mixes = (
            Mix.objects.exclude(archive_path__isnull=False)
            .annotate(num_plays=Count("activity_plays"))
            .order_by("num_plays")
        )
        for mix in mixes:
            if os.path.isfile(mix.get_absolute_path()):
                print "Uploading file for: %s" % mix.slug
                file_name = "%s.%s" % (mix.uid, mix.filetype)
                archive_path = url_path_join(settings.AZURE_ITEM_BASE_URL,
                                             settings.AZURE_CONTAINER,
                                             file_name)
                # Stream the file handle straight into the upload.
                with open(mix.get_absolute_path(), "rb") as iterator:
                    obj = driver.upload_object_via_stream(
                        iterator=iterator, container=container,
                        object_name=file_name
                    )
                print "Uploaded"
                mix.archive_path = archive_path
                mix.save()
                # Move the local copy into the archived directory;
                # iterator.name is still readable after the with-block.
                expired_path = join(settings.MEDIA_ROOT, "mixes/archived")
                new_file = os.path.join(expired_path, basename(iterator.name))
                os.rename(iterator.name, new_file)
                print "done- file is %s" % mix.archive_path
    except Exception, ex:
        print "Debug exception: %s" % ex.message
def driver(self):
    """Instantiate and return the libcloud driver for self._cloud_name,
    configured with the parameters from self.get_driver_params().

    Raises ValueError when the driver class or its parameters cannot be
    resolved (callers continue to catch ValueError, as before).
    """
    try:
        cloud_driver = get_driver(self._cloud_name)
        param_dict = self.get_driver_params()
    except Exception as e:
        # Was a bare `except: raise ValueError`, which caught even
        # SystemExit/KeyboardInterrupt and discarded the actual failure.
        # Narrow the catch, add a message, and chain the cause.
        raise ValueError(
            'Unable to resolve cloud driver %r' % (self._cloud_name,)) from e
    return cloud_driver(**param_dict)
def __init__(self):
    """Resolve the storage driver named by the class attributes
    driver_name / driver_options, connect it, and bind the working
    container named by container_name."""
    provider = getattr(Provider, self.driver_name)
    driver_cls = get_driver(provider)
    self.driver = driver_cls(**self.driver_options)
    self.container = self.driver.get_container(
        container_name=self.container_name
    )
def cloudy(self):
    """List all S3 buckets reachable with the AWS credentials from the
    environment, printing a count followed by one name per line."""
    access_id = os.getenv('AWS_ACCESS_KEY_ID')
    secret_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    driver_cls = get_driver(Provider.S3)
    connection = driver_cls(access_id, secret_key)
    buckets = connection.list_containers()
    print('%d containers:' % len(buckets))
    for bucket in buckets:
        print(' - %s' % bucket.name)
def setup(self):
    """Mount the test filesystem with MOUNT_CMD (exiting the process if
    the mount fails), then open the libcloud storage connection used by
    the tests.

    Relies on module-level `driver`, `access_id` and `secret` being
    defined elsewhere in this file.
    """
    print "mounting test filesystem"
    args = shlex.split(self.MOUNT_CMD)
    p = subprocess.Popen(args)
    p.wait()
    if 0 != p.returncode:
        print >>sys.stderr, "failed to mount filesystem"
        sys.exit(1)
    print "setting up libcloud storage connection"
    # `driver` here is a Provider attribute *name*, resolved via getattr.
    self.storage_handle = get_driver(getattr(Provider, driver))(access_id, secret)
    print self.storage_handle
def create_destination(args):
    """Ensure the destination container for a CF<->S3 copy exists.

    args.from_service selects the direction: 'S3' creates the CloudFiles
    container (if missing) and returns the S3 container; 'CF' creates the
    S3 container and returns the CloudFiles one. Any unexpected failure
    aborts via SystemExit.

    NOTE(review): each branch returns the *other* service's container,
    i.e. the source rather than the freshly-created destination --
    confirm against callers whether this is intended.
    """
    cf_driver = get_driver(Provider.CLOUDFILES_US)
    cf = cf_driver(args.cf_username, args.cf_password,
                   ex_force_service_region=args.cf_region)
    s3_driver = get_driver(Provider.S3)
    s3 = s3_driver(args.s3_access_id, args.s3_access_key)
    if args.from_service == 'S3':
        try:
            cf.create_container(args.cf_container)
        except libcloud.storage.types.ContainerAlreadyExistsError:
            # Already created on an earlier run; nothing to do.
            pass
        except Exception as e:
            raise SystemExit(e)
        return s3.get_container(args.s3_container)
    elif args.from_service == 'CF':
        try:
            s3.create_container(args.s3_container)
        except (libcloud.storage.types.ContainerAlreadyExistsError):
            pass
        except Exception as e:
            raise SystemExit(e)
        return cf.get_container(args.cf_container)
def __init__(self, provider_name, option=None):
    """Look up *provider_name* in settings.LIBCLOUD_PROVIDERS and build
    the matching libcloud storage driver with (user, key) credentials.

    provider_name -- key into settings.LIBCLOUD_PROVIDERS.
    option -- unused; kept for interface compatibility.

    Raises ImproperlyConfigured when the provider is unknown or the
    driver cannot be created.
    """
    self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
    if not self.provider:
        raise ImproperlyConfigured(
            'LIBCLOUD_PROVIDERS %s not define or invalid' % provider_name)
    try:
        Driver = get_driver(self.provider['type'])
        self.driver = Driver(
            self.provider['user'],
            self.provider['key'],
        )
    except Exception as e:
        # BUG FIX: the original '"...%s" % (type, e)' had one placeholder
        # for a two-tuple, raising TypeError instead of reporting the
        # real failure. Two placeholders, matching the sibling classes.
        raise ImproperlyConfigured(
            "Unable to create libcloud driver type %s: %s" %
            (self.provider.get('type'), e))
def get_driver_class(provider):
    """Resolve *provider* to a libcloud driver class.

    A dotted name ('some.module.ClassName') is imported and the class
    looked up on that module; a bare name is treated as an attribute of
    the Provider enum (upper-cased).

    :param provider: str - provider name
    :return: the libcloud driver class
    :raises ImportError: when the dotted class is missing from its module
    """
    if "." in provider:
        # Split 'pkg.mod.Class' into module path and class name.
        path, cls_name = provider.rsplit('.', 1)
        module = import_module(path)
        if not hasattr(module, cls_name):
            raise ImportError('{0} provider not found at {1}'.format(
                cls_name, path))
        target = getattr(module, cls_name)
    else:
        target = getattr(Provider, provider.upper())
    return get_driver(target)
def __init__(self, provider_name=None, option=None):
    """Build a libcloud storage driver from settings.LIBCLOUD_PROVIDERS.

    provider_name -- key into LIBCLOUD_PROVIDERS; falls back to
        settings.DEFAULT_LIBCLOUD_PROVIDER, then to 'default'.
    option -- unused; kept for interface compatibility.

    Raises ImproperlyConfigured when the provider entry is missing or
    the driver cannot be constructed.
    """
    if provider_name is None:
        provider_name = getattr(settings, "DEFAULT_LIBCLOUD_PROVIDER", "default")
    self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name)
    if not self.provider:
        raise ImproperlyConfigured("LIBCLOUD_PROVIDERS %s not defined or invalid" % provider_name)
    try:
        provider_type = self.provider["type"]
        # NOTE: `basestring` and `except Exception, e` make this block
        # Python 2 only.
        if isinstance(provider_type, basestring):
            # Only dotted names under the Provider enum are accepted.
            module_path, tag = provider_type.rsplit(".", 1)
            if module_path != "libcloud.storage.types.Provider":
                raise ValueError("Invalid module path")
            provider_type = getattr(Provider, tag)
        Driver = get_driver(provider_type)
        self.driver = Driver(self.provider["user"], self.provider["key"])
    except Exception, e:
        raise ImproperlyConfigured("Unable to create libcloud driver type %s: %s" % (self.provider.get("type"), e))
def test_cloud_master_key_store_s3(request, tmpdir):
    """Integration test for CloudMasterKeyStore against a real S3 bucket.

    Skipped unless --aws-access-key/--aws-secret-key/--aws-s3-bucket are
    provided. Verifies that load() on an empty store raises
    EmptyStoreError, that save() writes a key readable via a direct S3
    download, and that load() round-trips the key. The uploaded object
    is deleted afterwards.
    """
    try:
        access_key = request.config.getoption('--aws-access-key')
        secret_key = request.config.getoption('--aws-secret-key')
        bucket_name = request.config.getoption('--aws-s3-bucket')
    except ValueError:
        access_key = secret_key = bucket_name = None
    if access_key is None or secret_key is None or bucket_name is None:
        skip(
            '--aws-access-key/--aws-secret-key/--aws-s3-bucket are not '
            'provided; skipped'
        )
    driver_cls = get_driver(Provider.S3)
    driver = driver_cls(access_key, secret_key)
    container = driver.get_container(container_name=bucket_name)
    # Random hex object name so concurrent test runs don't collide.
    tmpname = ''.join(map('{:02x}'.format, os.urandom(16)))
    s = CloudMasterKeyStore(driver, container, tmpname)
    key = RSAKey.generate(1024)
    # load() -- when not exists
    with raises(EmptyStoreError):
        s.load()
    try:
        # save()
        s.save(key)
        obj = driver.get_object(container.name, tmpname)
        dest = tmpdir / tmpname
        obj.download(str(dest))
        saved = read_private_key_file(dest.open())
        assert isinstance(saved, RSAKey)
        assert saved.get_base64() == key.get_base64()
        # load() -- when exists
        loaded = s.load()
        assert isinstance(loaded, RSAKey)
        assert loaded.get_base64() == key.get_base64()
    finally:
        # Best-effort cleanup of the uploaded object.
        try:
            o = driver.get_object(container.name, tmpname)
        except ObjectDoesNotExistError:
            pass
        else:
            o.delete()
def backup(self, dest_provider, dest_type, host):
    """Tar+gzip the configured source directory and stream it to cloud
    storage as <destination_prefix>-<YYYY-MM-DD>.tar.gz.

    dest_provider -- attribute name on libcloud's Provider enum.
    dest_type, host -- only `host` is used here (printed).

    NOTE(review): the while loop calls upload_object_via_stream again
    whenever poll() is still None after an upload, re-using the same
    object name -- confirm the tar process is always drained by the
    first upload.
    """
    import subprocess
    from datetime import datetime
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver
    import libcloud.security
    # TODO Make this optional
    libcloud.security.VERIFY_SSL_CERT = False
    print host
    driver = get_driver(getattr(Provider, dest_provider))(
        self.config.destination_key, self.config.destination_secret)
    directory = expand_paths([self.config.source_name])
    cmd = 'tar cvzpf - %s' % (' '.join(directory))
    object_name = '%s-%s.tar.gz' % (self.config.destination_prefix,
                                    datetime.now().strftime('%Y-%m-%d'))
    container_name = self.config.destination_container
    # Create a container if it doesn't already exist
    try:
        container = driver.get_container(container_name=container_name)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=container_name)
    # Unbuffered pipe: tar's stdout is streamed straight to the upload.
    pipe = subprocess.Popen(cmd, bufsize=0, shell=True,
                            stdout=subprocess.PIPE)
    return_code = pipe.poll()
    print 'Uploading object...'
    while return_code is None:
        # Compress data in our directory and stream it directly to CF
        obj = container.upload_object_via_stream(iterator=pipe.stdout,
                                                 object_name=object_name)
        return_code = pipe.poll()
    print 'Upload complete, transferred: %s KB' % ((obj.size / 1024))
def __init__(self, **options):
    """Build a Cloud Provider: resolve the libcloud driver for this
    provider's name, authenticate with the supplied 'id'/'key' options,
    and bind the configured container.

    Failures are routed through self._handle_error with a
    detours-specific exception type.
    """
    super(DefaultCloudProvider, self).__init__(**options)
    try:
        driver_cls = get_driver(self._name)
        self._driver = driver_cls(options['id'], options['key'])
        self._container = self._driver.get_container(self._container_name)
        logger.info(
            "Connected to %s container from %s provider.",
            self._container_name, self._name)
    except AttributeError:
        self._handle_error(
            BadProviderError,
            "Provider %s doesn't exists in detours.",
            self._name)
    except ContainerDoesNotExistError:
        self._handle_error(
            BadContainerError,
            "Container %s does not exists in %s provider.",
            self._container_name, self._name)
    except Exception:
        self._handle_error(
            CloudDetoursError,
            "Fatal error while creating %s provider.",
            self._name)
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# Backblaze B2 credentials (placeholders).
account_id = 'XXXXXX'
application_key = 'YYYYYY'

# Resolve the B2 driver class and authenticate a connection with it.
cls = get_driver(Provider.BACKBLAZE_B2)
driver = cls(account_id, application_key)
#!/usr/bin/env python # Code samples for blog post # http://mikethetechie.com/post/6975966936/controlling-the-environment-cloud-control-apis # # Note - I couldn't get this to work. For more info, see http://mikethetechie.com/private/6969971577/tumblr_lng0mzd5bg1qf6p57 import secrets # import user name RACKSPACE_USER and API key RACKSPACE_KEY from libcloud.storage.types import Provider from libcloud.storage.providers import get_driver from pprint import pprint Driver = get_driver(Provider.CLOUDFILES_UK) conn2 = Driver(secrets.RACKSPACE_USER, secrets.RACKSPACE_KEY, 'https://lon.auth.api.rackspacecloud.com/v1.0') containers = conn2.list_containers() container_objects = conn2.list_container_objects(containers[0]) pprint(containers) pprint(container_objects) cont = conn2.create_container('Testlibcloudcontainer') obj = cont.create_object('t.html') obj.content_type = 'text/html' #Finally for some data. obj.write('<html><head></head><body><img src="xd_logo.jpg"><br />Hello world!</body></html>')
def _azure_client(self) -> Container:
    """Authenticate against the configured provider and return the
    container this instance is bound to."""
    driver_cls = get_driver(self._provider)
    connection = driver_cls(self._account, self._key)
    return connection.get_container(self._container)
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from libcloud.storage.types import Provider from libcloud.storage.providers import get_driver CloudFiles = get_driver(Provider.CLOUDFILES_UK) driver = CloudFiles('access key id', 'secret key') containers = driver.list_containers() container_objects = driver.list_container_objects(containers[0]) pprint(containers) pprint(container_objects)
# Script-level state for this shoveller run.
rootstring=''
datestamp=''
container_name = ('')

# Set up logging file
# NOTE(review): relies on module-level `loggydatestamp`, `access_token`
# and `shared_secret` defined elsewhere in this file.
logfilename = loggydatestamp + '-Shoveller-Cloud' + '.log'
print 'Logging to ' + logfilename
logging.basicConfig(filename=logfilename,filemode='w',level=logging.INFO,format='%(asctime)s %(message)s')
initialloggystring = 'New scan started.' + loggydatestamp
print initialloggystring
logging.info(initialloggystring)
errorcount = 0

print '\nLogging in...'
#Security Block -- Logging in with our certificates
libcloud.security.VERIFY_SSL_CERT = False
Ninefold = get_driver(Provider.NINEFOLD)
driver = Ninefold(access_token, shared_secret)
# This plays out as driver = Ninefold('YOUR Atmos Access Token HERE', 'YOUR Atmos Shared Secret HERE')

#Functions for printing the list of files and folders in cloud storage
def showcloudassets():
    """Print the list of containers in cloud storage; exit on any error."""
    try:
        containers = driver.list_containers()
        print '\nList of Containers\n'
        pprint(containers)
        print '\n'
    except:
        print "*** Error occurred: ", sys.exc_info()[0] , " ***"
        print 'Exiting...'
        sys.exit(1)
from __future__ import print_function
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# OpenStack Keystone v2 credentials (placeholders).
auth_username = '******'
auth_password = '******'
auth_url = 'http://controller:5000'
project_name = 'your_project_name_or_id'
region_name = 'your_region_name'

# Connect to the Swift object store in the chosen project/region.
provider = get_driver(Provider.OPENSTACK_SWIFT)
swift = provider(auth_username,
                 auth_password,
                 ex_force_auth_url=auth_url,
                 ex_force_auth_version='2.0_password',
                 ex_tenant_name=project_name,
                 ex_force_service_region=region_name)

container_name = 'fractals'
container = swift.create_container(container_name=container_name)
print(container)

print(swift.list_containers())

# Upload a local file into the container.
file_path = 'goat.jpg'
object_name = 'an amazing goat'
container = swift.get_container(container_name=container_name)
# BUG FIX: the result was previously bound to the name `object`,
# shadowing the builtin for the rest of the module.
uploaded_obj = container.upload_object(file_path=file_path,
                                       object_name=object_name)

objects = container.list_objects()
print(objects)
def __init__(self):
    """Initialise the backend: resolve the driver class named by
    LIBCLOUD_DRIVER, authenticate it with the module-level CREDENTIALS,
    and bind the container named by CONTAINER_NAME."""
    super(LibCloudBackend, self).__init__()
    self.driver_cls = get_driver(LIBCLOUD_DRIVER)
    self.driver = self.driver_cls(*CREDENTIALS)
    self.container = self.driver.get_container(CONTAINER_NAME)
def run():
    """Command-line entry point for the file syncer.

    Parses options, validates required arguments / provider / log level,
    builds a FileSyncer for the chosen provider, then runs either a
    restore from the container or a sync of the local directory to it.
    """
    usage = 'usage: %prog --username=<api username> --key=<api key> [options]'
    parser = OptionParser(usage=usage)
    parser.add_option('--provider', dest='provider', default='CLOUDFILES_US',
                      help='Provider to use')
    parser.add_option('--region', dest='region', default=None,
                      help='Region to use if a Libcloud driver supports \
multiple regions (e.g. ORD for CloudFiles provider)')
    parser.add_option('--username', dest='api_username',
                      help='API username')
    parser.add_option('--key', dest='api_key',
                      help='API key')
    parser.add_option('--restore', dest='restore', action="store_true",
                      help='Restore from')
    parser.add_option('--container-name', dest='container_name',
                      default='file_syncer',
                      help='Name of the container storing the files')
    parser.add_option('--directory', dest='directory',
                      help='Local directory to sync')
    parser.add_option('--cache-path', dest='cache_path',
                      default=os.path.expanduser('~/.file_syncer'),
                      help='Directory where a settings and cached manifest ' +
                      'files are stored')
    parser.add_option('--concurrency', dest='concurrency', default=10,
                      help='File upload concurrency')
    parser.add_option('--exclude', dest='exclude',
                      help='Comma separated list of file name patterns to ' +
                      'exclude')
    parser.add_option('--log-level', dest='log_level', default='INFO',
                      help='Log level')
    parser.add_option('--delete', dest='delete', action='store_true',
                      help='delete extraneous files from dest containers',
                      default=False)
    parser.add_option('--auto-content-type', dest='auto_content_type',
                      default=False, action='store_true',
                      help='Don\'t automatically specify \'application/' +
                      'octet-stream\' content-type for every file. Use ' +
                      'automatic file type detection based on the file ' +
                      'extension')
    parser.add_option('--ignore-symlinks', dest='ignore_symlinks',
                      default=False, action='store_true',
                      help='Don\'t visit directories pointed to by ' +
                      'symlinks, on systems that support them')

    (options, args) = parser.parse_args()

    # Every entry in REQUIRED_OPTIONS must have been supplied.
    for option_name, key in REQUIRED_OPTIONS:
        if not getattr(options, key, None):
            raise ValueError('Missing required argument: ' + option_name)

    # Set up provider
    if options.provider not in SUPPORTED_PROVIDERS:
        raise ValueError('Invalid provider: %s. Valid providers are: %s' %
                         (options.provider, ', '.join(SUPPORTED_PROVIDERS)))

    provider = PROVIDER_MAP[options.provider]

    # Set up logger
    log_level = options.log_level.upper()

    if log_level not in VALID_LOG_LEVELS:
        valid_levels = [value.lower() for value in VALID_LOG_LEVELS]
        raise ValueError('Invalid log level: %s. Valid log levels are: %s' %
                         (options.log_level, ', ' .join(valid_levels)))

    level = getattr(logging, log_level, 'INFO')
    logger = get_logger(handler=logging.StreamHandler(), level=level)

    directory = os.path.expanduser(options.directory)
    exclude_patterns = options.exclude or ''
    exclude_patterns = exclude_patterns.split(',')

    syncer = FileSyncer(directory=directory,
                        provider_cls=get_driver(provider),
                        provider=provider,
                        region=options.region,
                        username=options.api_username,
                        api_key=options.api_key,
                        container_name=options.container_name,
                        cache_path=options.cache_path,
                        exclude_patterns=exclude_patterns,
                        logger=logger,
                        concurrency=int(options.concurrency),
                        auto_content_type=options.auto_content_type,
                        ignore_symlinks=options.ignore_symlinks)

    if options.restore:
        syncer.restore()
    else:
        syncer.sync(options.delete)
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

# AuroraObjects credentials (placeholders).
access_key = 'XXXXXX'
secret_key = 'YYYYYY'

# Resolve the AuroraObjects driver class and open a connection with it.
cls = get_driver(Provider.AURORAOBJECTS)
driver = cls(access_key, secret_key)
def _get_driver(cls, name):
    """Wrapper for libcloud's get_driver.

    Kept as a thin indirection point so tests can override driver
    resolution on the class.
    """
    driver_cls = get_driver(name)
    return driver_cls
def make_driver(self):
    """Instantiate the storage driver named by self.provider_name,
    authenticated with self.username / self.secret in self.region."""
    provider_id = getattr(Provider, self.provider_name)
    driver_cls = get_driver(provider_id)
    return driver_cls(self.username, self.secret, region=self.region)