def get_swift_connection():
    """Build a swiftclient ``Connection`` from the usual Swift/OpenStack env vars.

    The auth version is taken from the first of ST_AUTH_VERSION,
    OS_AUTH_VERSION, OS_IDENTITY_API_VERSION that is set (default: 1).

    Raises:
        UsageError: when the required credentials for the chosen auth
            version are missing from the environment.
        NotImplementedError: for keystone v2.
    """
    # Find which auth version the environment requests.
    version_keys = ('ST_AUTH_VERSION', 'OS_AUTH_VERSION',
                    'OS_IDENTITY_API_VERSION')
    raw_version = next((os.environ[k] for k in version_keys
                        if k in os.environ), 1)
    # Normalize values such as "3.0" down to a plain int.
    auth_version = int(float(raw_version))

    if auth_version == 3:
        # Keystone v3: all four settings are mandatory.
        try:
            auth_url = os.environ['OS_AUTH_URL']
            auth_user = os.environ['OS_USERNAME']
            auth_password = os.environ['OS_PASSWORD']
            project_name = os.environ['OS_PROJECT_NAME']
        except KeyError:
            raise UsageError(
                "You need to set OS_AUTH_URL, OS_USERNAME, OS_PASSWORD and "
                "OS_PROJECT_NAME for Swift authentication")
        os_options = {
            'user_domain_name':
                os.environ.get('OS_USER_DOMAIN_NAME', 'Default'),
            'project_domain_name':
                os.environ.get('OS_PROJECT_DOMAIN_NAME', 'Default'),
            'project_name': project_name,
        }
        return Connection(auth_url, auth_user, auth_password,
                          os_options=os_options, auth_version='3')

    if auth_version == 2:
        # keystone v2 (not implemented)
        raise NotImplementedError('keystone v2 is not supported')

    # Legacy v1 (tempauth-style) credentials.
    try:
        auth_url = os.environ['ST_AUTH']
        auth_user = os.environ['ST_USER']
        auth_password = os.environ['ST_KEY']
    except KeyError:
        raise UsageError("You need to set ST_AUTH, ST_USER, ST_KEY for "
                         "Swift authentication")
    return Connection(auth_url, auth_user, auth_password)
def main(args):
    """Word-count mapper: reads Swift objects and writes "<word> 1" lines.

    Expected keys in ``args``: mapper_bucket, input_bucket, mapper_id,
    keys (slash-separated object names), url (Swift host).

    Returns:
        dict: {"res": "good"} on success.
    """
    mapper_bucket = args.get("mapper_bucket")
    input_bucket = args.get("input_bucket")  # NOTE(review): currently unused
    mapper_id = args.get("mapper_id")
    src_keys = args.get("keys")

    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)

    keys = src_keys.split('/')
    # Download and process all keys
    for key in keys:
        print(key)
        start = time()  # NOTE(review): timing captured but never reported
        # get_object() returns (headers, body); the body is plain bytes,
        # not a file-like object, so it must not be .read().
        _, contents = conn.get_object(mapper_bucket, key)
        words = contents.strip().split()
        # str.join avoids the quadratic repeated-concatenation pattern.
        result = "".join(word.decode() + " 1\n" for word in words)
        # NOTE(review): written once per key under the same object name,
        # so each key overwrites the previous result — confirm intent.
        conn.put_object(mapper_bucket, str(mapper_id), contents=result)
    return {"res": "good"}
def __init_swift_fileserver(self):
    """Connect to Swift, set the account temp-URL keys, and ensure the
    container exists.

    Reads connection settings from instance attributes and stores the live
    connection on ``self.swift_conn`` and the account storage URL on
    ``self.storageurl``.  Any failure is logged and re-raised.
    """
    # Pre-assign so the except branch can always reference it, even if
    # the failure happens before the sanitized copy is built.
    print_opts = None
    try:
        options = {
            'authurl': self.authurl,
            'user': self.user,
            'key': self.key,
            'auth_version': self.auth_version,
            'tenant_name': self.tenant_name,
            'insecure': True,
            'timeout': CONN_TIMEOUT,
            'os_options': {
                'region_name': self.region_name
            }
        }
        headers = {
            'Temp-URL-Key': self.temp_url_key,
            'Temp-URL-Key-2': self.temp_url_key_2
        }
        # Sanitized copy for logging: never print the password.
        print_opts = copy.deepcopy(options)
        print_opts['key'] = '<password stripped>'
        self.swift_conn = Connection(**options)
        self.swift_conn.post_account(headers)
        self.swift_conn.put_container(self.container_name)
        self.storageurl = self.swift_conn.get_auth()[0]
        # print() does not do logger-style %-interpolation; format explicitly.
        print('swift-file-server: Connected. options %s storageurl %s'
              % (print_opts, self.storageurl))
    except Exception as err:
        print('swift-file-server: Connect FAILED %s options %s'
              % (err, print_opts))
        raise
def swift_connect():
    """Open a Swift connection using the module-level default credentials."""
    return Connection(authurl=default_authurl,
                      user=default_user,
                      key=default_key,
                      auth_version=default_auth_version,
                      os_options=None)
def __init__(self, bucket, noop):
    """Setup the S3 storage backend with the bucket we will use and
    optional region.

    Credentials come from the ST_AUTH / ST_USER / ST_KEY environment
    variables, exactly like the swift command line util.  Exits the
    process when any of them is missing.
    """
    # This is our Swift container
    self.bucket = bucket
    self.noop = noop
    try:
        self.conn = Connection(authurl=os.environ["ST_AUTH"],
                               user=os.environ["ST_USER"],
                               key=os.environ["ST_KEY"],
                               timeout=30)
    except KeyError:
        logger.warning(
            "Missing environment variables for Swift authentication")
        logger.warning("Bailing...")
        sys.exit(1)

    # get_account() yields (headers, [container dicts]).  The old code
    # passed self.bucket positionally, which swiftclient treats as the
    # listing *marker* — excluding the bucket itself — and then compared
    # the bucket name against whole dicts, so the container was recreated
    # on every run.  Compare against the container names instead.
    headers, containers = self.conn.get_account()
    names = [c["name"] for c in containers]
    for name in names:
        logger.debug("Searching for bucket %s == %s" % (self.bucket, name))
    if not noop and self.bucket not in names:
        self.conn.put_container(self.bucket)
def __init__(self):
    """Parse the command line and open a Memstore(tm) Swift connection.

    Expects exactly two positional arguments: memstore_admin and password.
    """
    parser = OptionParser(
        usage="%prog [options] memstore_admin password",
        version="%prog v" + VERSION,
        description="List Miniserver snapshots stored in a "
                    "Memstore(tm) instance.",
        epilog="For further information, please visit: "
               "http://www.memset.com/cloud/storage/",
    )
    parser.add_option(
        "--auth_url",
        type="str",
        dest="auth_url",
        default=AUTH_URL,
        help="Auth URL (default: %s)" % AUTH_URL,
    )
    self.options, self.args = parser.parse_args()
    if len(self.args) != 2:
        parser.error("Not enough parameters provided")
    admin_user, admin_password = self.args
    self.conn = Connection(self.options.auth_url, admin_user, admin_password)
def get_connection(store_settings: dict) -> Connection:
    """Return an objectstore connection built from *store_settings*."""
    os_options = {
        'tenant_id': store_settings['TENANT_ID'],
        'region_name': store_settings['REGION_NAME'],
    }
    # Inside the cloudvps network the internal endpoint must be used;
    # OBJECTSTORE_LOCAL (any non-empty value) signals that situation.
    if os.getenv('OBJECTSTORE_LOCAL', ''):
        os_options['endpoint_type'] = 'internalURL'
    return Connection(authurl=store_settings['AUTHURL'],
                      user=store_settings['USER'],
                      key=store_settings['PASSWORD'],
                      tenant_name=store_settings['TENANT_NAME'],
                      auth_version=store_settings['VERSION'],
                      os_options=os_options)
def main(args):
    """Reducer: concatenate all mapper outputs, reduce, write result JSON.

    Expected keys in ``args``: output_bucket, input_reducer_bucket, url.

    Returns:
        dict: {"res": "good"} on success.
    """
    output_bucket = args.get("output_bucket")
    input_reducer_bucket = args.get("input_reducer_bucket")

    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)

    # get_container() returns (headers, [object dicts]).
    all_keys = [obj['name']
                for obj in conn.get_container(input_reducer_bucket)[1]]

    final_str = []
    for key in all_keys:
        start = time()  # NOTE(review): timing captured but never reported
        # get_object() returns a (headers, body-bytes) tuple; the body is
        # plain bytes, not a file-like object, so unpack instead of .read().
        _, contents = conn.get_object(input_reducer_bucket, key)
        final_str.append(to_string(getResultReduce(contents.decode())))
    final_str = "\n".join(final_str)
    print(final_str)
    final_res = getResultReduce(final_str)
    conn.put_object(output_bucket, "reduce_res.json",
                    contents=json.dumps(final_res))
    return {"res": "good"}
def __init__(self, session=None, swift_storage_url=None,
             swift_auth_token=None, swift_auth_v1_url=None,
             swift_user=None, swift_key=None):
    """Create new OpenStack Swift Object Storage Session.

    Three methods are possible:
        1. Create session by the swiftclient library.
        2. The SWIFT_STORAGE_URL and SWIFT_AUTH_TOKEN
           (this method is recommended by GDAL docs).
        3. The SWIFT_AUTH_V1_URL, SWIFT_USER and SWIFT_KEY
           (this depends on the swiftclient library).

    Parameters
    ----------
    session: optional
        A swiftclient connection object
    swift_storage_url: string, optional
        The storage URL
    swift_auth_token: string, optional
        The value of the x-auth-token authorization token
    swift_auth_v1_url: string, optional
        Authentication URL
    swift_user: string, optional
        User name to authenticate as
    swift_key: string, optional
        Key/password to authenticate with

    Examples
    --------
    >>> import rasterio
    >>> from rasterio.session import SwiftSession
    >>> fp = '/vsiswift/bucket/key.tif'
    >>> conn = Connection(authurl='http://127.0.0.1:7777/auth/v1.0', user='******', key='testing')
    >>> session = SwiftSession(conn)
    >>> with rasterio.Env(session):
    >>>     with rasterio.open(fp) as src:
    >>>         print(src.profile)
    """
    if swift_storage_url and swift_auth_token:
        # Method 2: credentials supplied directly, no swiftclient needed.
        self._creds = {
            "swift_storage_url": swift_storage_url,
            "swift_auth_token": swift_auth_token
        }
    else:
        from swiftclient.client import Connection
        if session:
            self._session = session
        else:
            self._session = Connection(authurl=swift_auth_v1_url,
                                       user=swift_user,
                                       key=swift_key)
        # get_auth() performs an authentication round-trip; call it once
        # and reuse the (storage_url, token) pair instead of hitting the
        # server twice as the previous code did.
        storage_url, auth_token = self._session.get_auth()
        self._creds = {
            "swift_storage_url": storage_url,
            "swift_auth_token": auth_token
        }
def test_load_handled_files_invalid(self, _):
    """A whitelisted container entry must not surface via load_handled_files()."""
    store = StoreObjectStorage('handled_files_test',
                               container_name='foobar',
                               connection=Connection())
    store.whitelisted.append('foobar')
    self.assertEqual(store.load_handled_files(), set())
def _auth(self):
    """Authenticate against Swift (keystone v2) with the static
    Configuration credentials and return the connection."""
    return Connection(authurl=Configuration.auth_url,
                      user=Configuration.username,
                      key=Configuration.password,
                      tenant_name=Configuration.tenant_name,
                      auth_version=2,
                      insecure=False)
def upload_output_to_s3(bucketName, filePrefix, url):
    # Upload every file from TEMP_OUTPUT_DIR to the Swift bucket, at most
    # MAX_PARALLEL_UPLOADS at a time.  Returns (file_count, total_bytes).
    # NOTE: Python 2 module (print statements, xrange).
    print 'Uploading files to s3: %s' % bucketName
    _authurl = "http://" + url + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    #Perfs counters
    # s3 = boto3.client('s3', config=botocore.client.Config(
    #     max_pool_connections=MAX_PARALLEL_UPLOADS))
    count = 0
    totalSize = 0
    results = []
    # Thread pool does the uploads; the semaphore caps how many are
    # in flight at once and doubles as the "all done" barrier below.
    pool = ThreadPool(MAX_PARALLEL_UPLOADS)
    sema = Semaphore(MAX_PARALLEL_UPLOADS)

    def upload_file(localFilePath, uploadFileName, fileSize):
        # Runs on a pool thread; the semaphore slot is held for the
        # duration of the upload.
        sema.acquire()
        try:
            print 'Start: %s [%dKB]' % (localFilePath, fileSize >> 10)
            with open(localFilePath, 'rb') as ifs:
                #s3.put_object(Body=ifs, Bucket=bucketName,
                #              Key=uploadFileName,
                #              StorageClass='REDUCED_REDUNDANCY')
                conn.put_object(bucketName, uploadFileName,
                                contents=ifs)  #, content_type="image/jpeg"
            print 'Done: %s' % localFilePath
        finally:
            sema.release()

    for fileName in list_output_files():
        localFilePath = os.path.join(TEMP_OUTPUT_DIR, fileName)
        uploadFileName = os.path.join(filePrefix, fileName)
        fileSize = os.path.getsize(localFilePath)
        result = pool.apply_async(upload_file,
                                  args=(localFilePath, uploadFileName,
                                        fileSize))
        results.append(result)
        count += 1
        totalSize += fileSize

    # block until all threads are done (re-raises worker exceptions)
    for result in results:
        result.get()

    # block until all uploads are finished: only possible once every
    # worker has released its semaphore slot
    for _ in xrange(MAX_PARALLEL_UPLOADS):
        sema.acquire()

    print 'Uploaded %d files to Swift [total=%dKB]' % (count, totalSize >> 10)
    return (count, totalSize)
def _setup(self):
    """Create a keystone-v3-backed Swift client and the quota helper."""
    password_auth = v3.Password(
        auth_url=self.auth_url,
        user_domain_name=self.user_domain_name,
        username=self.username,
        password=self.password,
        project_domain_name=self.project_domain_name,
        project_name=self.project_name)
    keystone_session = session.Session(auth=password_auth)
    self.client = Connection(auth_version=self.auth_version,
                             session=keystone_session)
    self.quota = OpenStackQuota(self.client, self.tenant_id, self.limit)
def _get_client(self):
    """Return a Swift client authenticated via keystone v3.

    Username and password come from the INFRABOX_STORAGE_SWIFT_*
    environment variables; the domain/project settings come from
    instance attributes.
    """
    password_auth = v3.Password(
        auth_url=self.auth_url,
        username=os.getenv('INFRABOX_STORAGE_SWIFT_USERNAME'),
        password=os.getenv('INFRABOX_STORAGE_SWIFT_PASSWORD'),
        user_domain_name=self.user_domain_name,
        project_name=self.project_name,
        project_domain_name=self.project_domain_name)
    keystone_session = session.Session(auth=password_auth)
    return Connection(session=keystone_session)
def _get_connection(self):
    """Build a swiftclient Connection from the stored credentials/options."""
    # Fall back to module defaults when the optional knobs are unset/zero.
    timeout = self._connect_timeout or _DEFAULT_SWIFT_CONNECT_TIMEOUT
    retries = self._retry_count or 5
    return Connection(
        authurl=self._auth_url,
        cacert=self._ca_cert_path,
        user=self._swift_user,
        key=self._swift_password,
        auth_version=self._auth_version,
        os_options=self._os_options,
        retry_on_ratelimit=self._retry_on_ratelimit,
        timeout=timeout,
        retries=retries,
    )
def get_connection_v1(user, key):
    """Return a Swift v1 (tempauth) connection to the local test cluster.

    Parameters
    ----------
    user, key : str
        Swift account credentials.

    Returns
    -------
    conn : swiftclient connection
    """
    authurl = "http://127.0.0.1:8080/auth/v1.0"
    # '1' is swiftclient's default auth version; pass it explicitly so the
    # intent is visible (the old local variable was assigned but unused).
    return Connection(authurl=authurl, user=user, key=key, auth_version='1')
def get_swift_connection_userdocs():
    """Return a connection to the swift object storage, configured
    entirely from the Django ``settings`` module.

    :return connection: connection to swift
    :rtype connection: swiftclient.client.Connection
    """
    return Connection(authurl=settings.OS_AUTH_URL,
                      user=settings.OS_USERNAME,
                      key=settings.OS_PASSWORD,
                      tenant_name=settings.OS_TENANT_NAME,
                      auth_version=settings.OS_IDENTITY_API_VERSION,
                      os_options=settings.OS_OPTIONS)
def add_client(self):
    """Build a Swift connection from self.config, sanity-check it with a
    HEAD on the account, and register it with the client pool."""
    conn = Connection(authurl=self.config.auth_url,
                      user=self.config.user,
                      key=self.config.key,
                      retries=self.config.retries,
                      snet=self.config.is_snet,
                      starting_backoff=float(self.config.starting_backoff),
                      max_backoff=float(self.config.max_backoff),
                      tenant_name=self.config.tenant_name,
                      os_options=parse_extra_into_dict(
                          self.config.custom_options),
                      auth_version=self.config.auth_version,
                      cacert=self.config.cacert,
                      insecure=not self.config.should_validate_cert,
                      ssl_compression=self.config.needs_tls_compr,
                      retry_on_ratelimit=self.config.should_retr_ratelimit)
    try:
        # Cheap liveness/credentials probe before handing the
        # connection to the pool.
        conn.head_account()
    except Exception:
        # Deliberately best-effort: log the failure but still register
        # the client so a transient error does not drop the definition.
        self.logger.warn('Could not HEAD an account (%s), e:`%s`',
                         self.config.name, format_exc())
    self.client.put_client(conn)
def prepareffmpeg(url):
    """Download the ffmpeg binary from Swift and install it, executable,
    at FFMPEG_PATH."""
    _authurl = "http://" + url + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)
    _, ffmpegExec = conn.get_object(container_name, "ffmpeg")
    # The object body is raw bytes; writing in text mode ('w') raises a
    # TypeError on Python 3 and risks corrupting the binary — use 'wb'.
    with open(FFMPEG_PATH, 'wb') as local:
        local.write(ffmpegExec)
    os.chmod(FFMPEG_PATH, 0o0755)
def swift_connect(_authurl=None, _auth_version=None, _user=None, _key=None):
    """Open a Swift connection, substituting the module-level defaults for
    any argument left as None.

    Returns:
        swiftclient Connection.
    """
    # PEP 8: compare against None with `is`, never `==`.
    if _authurl is None:
        _authurl = default_authurl
    if _auth_version is None:
        _auth_version = default_auth_version
    if _user is None:
        _user = default_user
    if _key is None:
        _key = default_key
    return Connection(authurl=_authurl, user=_user, key=_key,
                      auth_version=_auth_version,
                      os_options=default_os_options)
def main(args):
    """Orchestrator: fan mapper actions out over batches of input keys and
    wait until every mapper has written its output object.

    Expected keys in ``args``: output_bucket, mapper_bucket, input_bucket,
    n_mapper, url.

    Returns:
        dict with the buckets the reducer stage should use.
    """
    output_bucket = args.get("output_bucket")
    mapper_bucket = args["mapper_bucket"]
    input_bucket = args["input_bucket"]
    n_mapper = args["n_mapper"]

    _authurl = "http://" + args.get('url') + ":8080/auth/v1.0"
    conn = Connection(authurl=_authurl, user=_user, key=_key,
                      tenant_name=_tenant_name, auth_version=_auth_version)

    # Fetch all the keys
    all_keys = [obj['name'] for obj in conn.get_container(input_bucket)[1]]
    total_size = len(all_keys)

    # Ceiling division.  Must stay an int: batch_size is used in slice
    # bounds, and the old `total_size / n_mapper` produced a float on
    # Python 3, which raises TypeError when slicing.
    if total_size % n_mapper == 0:
        batch_size = total_size // n_mapper
    else:
        batch_size = total_size // n_mapper + 1

    for idx in range(n_mapper):
        print("mapper-" + str(idx) + ":"
              + str(all_keys[idx * batch_size: (idx + 1) * batch_size]))

    pool = ThreadPool(n_mapper)
    invoke_mapper_partial = partial(invoke_mapper_action, "default/mapper",
                                    mapper_bucket, input_bucket, all_keys,
                                    batch_size)
    pool.map(invoke_mapper_partial, range(n_mapper))
    pool.close()
    pool.join()

    while True:
        # get_container() returns (headers, [object dicts]) — a plain
        # list, not an S3-style response, so there is no "Contents" key
        # (the old `res_s3.keys()` call raised AttributeError).
        job_keys = conn.get_container(mapper_bucket)[1]
        print("Wait Mapper Jobs ...")
        time.sleep(5)
        if len(job_keys) == n_mapper:
            print("[*] Map Done : mapper " + str(len(job_keys))
                  + " finished.")
            break

    return {"input_reducer_bucket": mapper_bucket,
            "output_bucket": output_bucket}
def createConnection(config):
    """Authenticate via keystone v3 and store the resulting Swift
    connection in the module-level ``swift`` global."""
    global swift
    conf = config.p.conf_file
    # Password auth plugin built from the configuration file.
    auth = v3.Password(
        auth_url=conf.get_swift_auth_url(),
        username=conf.get_swift_user(),
        password=conf.get_swift_password(),
        user_domain_name=conf.get_swift_user_domain_name(),
        project_name=conf.get_swift_project_name(),
        project_id=conf.get_swift_project_id())
    # NOTE(review): certificate verification is disabled on both the
    # keystone session and the Swift connection — confirm this is intended.
    keystone_session = session.Session(auth=auth, verify=False)
    swift = Connection(insecure=True, session=keystone_session)
def swift_client(context):
    """Return a pre-authenticated Swift client for the tenant in *context*.

    Uses CONF.swift_url directly when configured, otherwise resolves the
    endpoint from the context's service catalog.
    """
    if CONF.swift_url:
        # swift_url has a different format so doesn't need to be normalized
        url = '%(swift_url)s%(tenant)s' % {'swift_url': CONF.swift_url,
                                           'tenant': context.tenant}
    else:
        url = get_endpoint(context.service_catalog,
                           service_type=CONF.swift_service_type,
                           endpoint_region=CONF.os_region_name,
                           endpoint_type=CONF.swift_endpoint_type)
    return Connection(preauthurl=url,
                      preauthtoken=context.auth_token,
                      tenant_name=context.tenant,
                      snet=USE_SNET)
def __init__(self, container, name):
    """Select tenant credentials ('bag_brk' vs. the BGT default), patch
    them into the shared OBJECTSTORE settings, and connect.

    NOTE(review): this mutates the module-level OBJECTSTORE dict in
    place, so instances created with different names overwrite each
    other's settings — confirm no concurrent use.
    """
    if name == 'bag_brk':
        OBJECTSTORE['user'] = '******'
        OBJECTSTORE['key'] = BAG_OBJECTSTORE_PASSWORD
        OBJECTSTORE['os_options'][
            'tenant_id'] = '4f2f4b6342444c84b3580584587cfd18'
        OBJECTSTORE['tenant_name'] = 'BGE000081_BAG'
    else:
        OBJECTSTORE['user'] = '******'
        OBJECTSTORE['key'] = BGT_OBJECTSTORE_PASSWORD
        OBJECTSTORE['os_options'][
            'tenant_id'] = '1776010a62684386a08b094d89ce08d9'
        OBJECTSTORE['tenant_name'] = 'BGE000081_BGT'
    self.conn = Connection(**OBJECTSTORE)
    self.container = container
def __init__(self, container, name):
    """Select tenant credentials ('gob' vs. the BGT default), patch them
    into the shared OBJECTSTORE settings, and connect.

    NOTE(review): this mutates the module-level OBJECTSTORE dict in
    place, so instances created with different names overwrite each
    other's settings — confirm no concurrent use.
    """
    if name == 'gob':
        OBJECTSTORE['user'] = '******'
        OBJECTSTORE['key'] = GOB_OBJECTSTORE_PASSWORD
        OBJECTSTORE['os_options'][
            'tenant_id'] = '2ede4a78773e453db73f52500ef748e5'
        OBJECTSTORE['tenant_name'] = 'BGE000081 GOB'
    else:
        OBJECTSTORE['user'] = '******'
        OBJECTSTORE['key'] = BGT_OBJECTSTORE_PASSWORD
        OBJECTSTORE['os_options'][
            'tenant_id'] = '1776010a62684386a08b094d89ce08d9'
        OBJECTSTORE['tenant_name'] = 'BGE000081_BGT'
    self.conn = Connection(**OBJECTSTORE)
    self.container = container
def swift_client(context, region_name=None):
    """Return a pre-authenticated Swift connection for the project in
    *context*.

    The endpoint comes from CONF.swift_url when set, otherwise from the
    service catalog (using *region_name* or the configured default region).
    """
    if CONF.swift_url:
        # swift_url has a different format so doesn't need to be normalized
        url = '%(swift_url)s%(tenant)s' % {'swift_url': CONF.swift_url,
                                           'tenant': context.project_id}
    else:
        region = region_name or CONF.service_credentials.region_name
        url = get_endpoint(context.service_catalog,
                           service_type=CONF.swift_service_type,
                           endpoint_region=region,
                           endpoint_type=CONF.swift_endpoint_type)
    return Connection(preauthurl=url,
                      preauthtoken=context.auth_token,
                      tenant_name=context.project_id,
                      snet=CONF.backup_use_snet,
                      insecure=CONF.swift_api_insecure)
def get_connection():
    """Connect to the objectstore.

    Defaults to the internal endpoint; when the LOCAL environment
    variable is set (we run outside the datacenter) the endpoint_type
    override is dropped so the public URL is used.
    """
    os_options = {
        'tenant_id': store['TENANT_ID'],
        'region_name': store['REGION_NAME'],
        'endpoint_type': 'internalURL',
    }
    log.debug("Do we run local? set ENV LOCAL to something")
    if os.getenv('LOCAL', False):
        log.debug("We run LOCAL!")
        os_options.pop('endpoint_type')
    return Connection(authurl=store['AUTHURL'],
                      user=store['USER'],
                      key=store['PASSWORD'],
                      tenant_name=store['TENANT_NAME'],
                      auth_version=store['VERSION'],
                      os_options=os_options)
def upload_file(container, input_file, output_file):
    """Stream the local *input_file* into Swift as *output_file* within
    *container*, authenticating via keystone v3."""
    auth = v3.Password(auth_url=swift_auth,
                       username=swift_me,
                       password=swift_pass,
                       user_domain_name='Default',
                       project_name='Data Science',
                       project_domain_name='Default')
    # Create session
    keystone_session = session.Session(auth=auth)
    # Create swiftclient Connection
    swift_conn = Connection(session=keystone_session)
    # Stream in 100 KiB chunks rather than loading the file into memory.
    with open(input_file, 'rb') as source:
        swift_conn.put_object(container,
                              output_file,
                              contents=source,
                              content_type='text/plain',
                              chunk_size=1024 * 100)
def run(self, terms, variables=None, **kwargs):
    """Returns Ironic Inspector introspection data.

    Access swift and return introspection data for all nodes.

    :returns a list of tuples, one for each node.
    """
    auth_session = get_auth_session(kwargs.get('auth_url'),
                                    "ironic", "service",
                                    kwargs.get('password'))
    swift_client = Connection(session=auth_session)
    # get_container() returns (headers, [object dicts]).
    listing = swift_client.get_container("ironic-inspector")[1]
    results = []
    for item in listing:
        name = item['name']
        # Only finished introspection blobs, not the *UNPROCESSED copies.
        if name.startswith('inspector_data') \
                and not name.endswith("UNPROCESSED"):
            obj = swift_client.get_object("ironic-inspector", name)
            results.append((name, obj))
    return results
def objectstore_connection(config_full_path, config_name,
                           print_config_vars=None):
    """
    Get an objectsctore connection.

    Args:
        1. config_full_path: /path_to_config/config.ini or config.ini if in root.
        2. config_name: objectstore
        3. print_config_vars: if set to True: print all variables from the
           config file

    Returns:
        An objectstore connection session.
    """
    # Fail fast when the password is missing from the environment.
    assert os.environ['OBJECTSTORE_PASSWORD']
    config = get_config(config_full_path)

    if print_config_vars:
        # NOTE(review): this logs the module-level OBJECTSTORE dict, not
        # the config that was just read — confirm this is intended.
        logger.info('config variables.. :{}'.format(OBJECTSTORE))

    conn = Connection(
        authurl=config.get(config_name, 'AUTHURL'),
        user=config.get(config_name, 'USER'),
        key=os.environ['OBJECTSTORE_PASSWORD'],
        tenant_name=config.get(config_name, 'TENANT_NAME'),
        auth_version=config.get(config_name, 'VERSION'),
        os_options={
            'tenant_id': config.get(config_name, 'TENANT_ID'),
            'region_name': config.get(config_name, 'REGION_NAME'),
            # 'endpoint_type': 'internalURL'
        })

    logger.info('Established successfull connection to {}'.format(
        config.get(config_name, 'TENANT_NAME')))

    return conn