def get_s3_buckets(filter=".*", pool={}, details=False, env=None, region=None): result = [] s3_client = _connect("s3", env=env, region=region) def handle(bucket): bucket_name = bucket["Name"] if re.match(filter, bucket_name): arn = "arn:aws:s3:::%s" % bucket_name bucket = S3Bucket(arn) result.append(bucket) pool[arn] = bucket if details: try: response = s3_client.get_bucket_notification_configuration( Bucket=bucket_name) if response: notifications = parse_notification_configuration( response) bucket.notifications.extend(notifications) except Exception as e: LOG.warning("Unable to get details for bucket: %s", e) try: out = s3_client.list_buckets() # TODO: `handle` is not process safe, and threading may actually make the code slower parallelize(handle, out["Buckets"]) except Exception: pass return result
def run_parallel_download():
    file_length = 10000000

    class DownloadListener(ProxyListener):
        def forward_request(self, method, path, data, headers):
            # The request path encodes the number of seconds to sleep before
            # responding, e.g. "/2" delays the response by two seconds.
            sleep_time = int(path.replace('/', ''))
            time.sleep(sleep_time)
            response = Response()
            response.status_code = 200
            response._content = ('%s' % sleep_time) * file_length
            return response

    test_port = 12124
    tmp_file_pattern = '/tmp/test.%s'
    proxy = GenericProxy(port=test_port, update_listener=DownloadListener())
    proxy.start()

    def do_download(param):
        tmp_file = tmp_file_pattern % param
        TMP_FILES.append(tmp_file)
        download('http://localhost:%s/%s' % (test_port, param), tmp_file)

    values = (1, 2, 3)
    parallelize(do_download, values)
    proxy.stop()

    for val in values:
        tmp_file = tmp_file_pattern % val
        assert len(load_file(tmp_file)) == file_length

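# `download` and `load_file` are helpers assumed by the test above; they are
# not defined in these snippets. Hypothetical minimal versions for running
# the test standalone:
import requests

def download(url, path):
    # Stream the response body straight to a local file.
    with requests.get(url, stream=True) as r:
        with open(path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)

def load_file(path):
    with open(path) as f:
        return f.read()
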
def get_lambda_functions(filter='.*', details=False, pool={}, env=None):
    if MOCK_OBJ:
        return []
    result = []

    def handle(func):
        func_name = func['FunctionName']
        if re.match(filter, func_name):
            arn = func['FunctionArn']
            f = LambdaFunction(arn)
            pool[arn] = f
            result.append(f)
            if details:
                sources = get_lambda_event_sources(f.name(), env=env)
                for src in sources:
                    arn = src['EventSourceArn']
                    f.event_sources.append(EventSource.get(arn, pool=pool))
                try:
                    code_map = get_lambda_code(func_name, env=env)
                    f.targets = extract_endpoints(code_map, pool)
                except Exception:
                    LOG.warning("Unable to get code for lambda '%s'" % func_name)

    try:
        out = cmd_lambda('list-functions', env)
        out = json.loads(out)
        parallelize(handle, out['Functions'])
    except socket.error:
        pass
    return result

def get_lambda_functions(filter=".*", details=False, pool={}, env=None, region=None): if MOCK_OBJ: return [] result = [] def handle(func): func_name = func["FunctionName"] if re.match(filter, func_name): arn = func["FunctionArn"] f = LambdaFunction(arn) pool[arn] = f result.append(f) if details: sources = get_lambda_event_sources(f.name(), env=env) for src in sources: arn = src["EventSourceArn"] f.event_sources.append(EventSource.get(arn, pool=pool)) try: code_map = get_lambda_code(func_name, env=env) f.targets = extract_endpoints(code_map, pool) except Exception: LOG.warning("Unable to get code for lambda '%s'" % func_name) try: lambda_client = _connect("lambda", env=env, region=region) out = lambda_client.list_functions() parallelize(handle, out["Functions"]) except Exception: pass return result
def get_s3_buckets(filter='.*', pool={}, details=False, env=None):
    result = []

    def handle(bucket):
        bucket_name = bucket['Name']
        if re.match(filter, bucket_name):
            arn = 'arn:aws:s3:::%s' % bucket_name
            bucket = S3Bucket(arn)
            result.append(bucket)
            pool[arn] = bucket
            if details:
                try:
                    out = cmd_s3api('get-bucket-notification-configuration --bucket %s' %
                        bucket_name, env=env)
                    if out:
                        out = json.loads(out)
                        if 'CloudFunctionConfiguration' in out:
                            func = out['CloudFunctionConfiguration']['CloudFunction']
                            func = EventSource.get(func, pool=pool)
                            n = S3Notification(func.id)
                            n.target = func
                            bucket.notifications.append(n)
                except Exception as e:
                    print('WARNING: Unable to get details for bucket: %s' % e)

    try:
        out = cmd_s3api('list-buckets', env)
        out = json.loads(out)
        parallelize(handle, out['Buckets'])
    except socket.error:
        pass
    return result

def get_s3_buckets(filter='.*', pool={}, details=False, env=None, region=None):
    result = []
    s3_client = _connect('s3', env=env, region=region)

    def handle(bucket):
        bucket_name = bucket['Name']
        if re.match(filter, bucket_name):
            arn = 'arn:aws:s3:::%s' % bucket_name
            bucket = S3Bucket(arn)
            result.append(bucket)
            pool[arn] = bucket
            if details:
                try:
                    out = s3_client.get_bucket_notification(Bucket=bucket_name)
                    if out and 'CloudFunctionConfiguration' in out:
                        func = out['CloudFunctionConfiguration']['CloudFunction']
                        func = EventSource.get(func, pool=pool)
                        n = S3Notification(func.id)
                        n.target = func
                        bucket.notifications.append(n)
                except Exception as e:
                    print('WARNING: Unable to get details for bucket: %s' % e)

    try:
        out = s3_client.list_buckets()
        parallelize(handle, out['Buckets'])
    except Exception:
        pass
    return result

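# Hypothetical usage: collect buckets matching a name prefix together with
# their Lambda notification targets. This assumes the bucket model exposes an
# `id` attribute like the other components in these snippets.
pool = {}
buckets = get_s3_buckets(filter='data-.*', details=True, pool=pool)
for bucket in buckets:
    print(bucket.id, len(bucket.notifications))
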
def get_elasticsearch_domains(filter=".*", pool=None, env=None, region=None): if pool is None: pool = {} result = [] try: es_client = _connect("es", env=env, region=region) out = es_client.list_domain_names() def handle(domain): domain = domain["DomainName"] if re.match(filter, domain): details = es_client.describe_elasticsearch_domain( DomainName=domain) details = details["DomainStatus"] arn = details["ARN"] es = ElasticSearch(arn) es.endpoint = details.get("Endpoint", "n/a") result.append(es) pool[arn] = es parallelize(handle, out["DomainNames"]) except Exception: pass return result
def get_dynamo_dbs(filter='.*', pool={}, env=None):
    result = []
    try:
        out = cmd_dynamodb('list-tables', env)
        out = json.loads(out)

        def handle(table):
            if re.match(filter, table):
                details = cmd_dynamodb('describe-table --table-name %s' % table, env)
                details = json.loads(details)['Table']
                arn = details['TableArn']
                db = DynamoDB(arn)
                db.count = details['ItemCount']
                db.bytes = details['TableSizeBytes']
                db.created_at = details['CreationDateTime']
                result.append(db)
                pool[arn] = db

        parallelize(handle, out['TableNames'])
    except socket.error:
        pass
    return result

def get_elasticsearch_domains(filter='.*', pool={}, env=None):
    result = []
    try:
        out = cmd_es('list-domain-names', env)
        out = json.loads(out)

        def handle(domain):
            domain = domain['DomainName']
            if re.match(filter, domain):
                details = cmd_es('describe-elasticsearch-domain --domain-name %s' % domain, env)
                details = json.loads(details)['DomainStatus']
                arn = details['ARN']
                es = ElasticSearch(arn)
                es.endpoint = details.get('Endpoint', 'n/a')
                result.append(es)
                pool[arn] = es

        parallelize(handle, out['DomainNames'])
    except socket.error:
        pass
    return result

def get_elasticsearch_domains(filter='.*', pool={}, env=None):
    result = []
    try:
        out = cmd_es('list-domain-names', env)
        out = json.loads(out)

        def handle(domain):
            domain = domain['DomainName']
            if re.match(filter, domain):
                details = cmd_es('describe-elasticsearch-domain --domain-name %s' % domain, env)
                details = json.loads(details)['DomainStatus']
                arn = details['ARN']
                es = ElasticSearch(arn)
                es.endpoint = details['Endpoint']
                result.append(es)
                pool[arn] = es

        parallelize(handle, out['DomainNames'])
    except socket.error:
        pass
    return result

def get_dynamo_dbs(filter='.*', pool={}, env=None, region=None):
    result = []
    try:
        dynamodb_client = _connect('dynamodb', env=env, region=region)
        out = dynamodb_client.list_tables()

        def handle(table):
            if re.match(filter, table):
                details = dynamodb_client.describe_table(TableName=table)
                details = details['Table']
                arn = details['TableArn']
                db = DynamoDB(arn)
                db.count = details['ItemCount']
                db.bytes = details['TableSizeBytes']
                db.created_at = details['CreationDateTime']
                result.append(db)
                pool[arn] = db

        parallelize(handle, out['TableNames'])
    except Exception:
        pass
    return result

def get_elasticsearch_domains(filter='.*', pool={}, env=None, region=None):
    result = []
    try:
        es_client = _connect('es', env=env, region=region)
        out = es_client.list_domain_names()

        def handle(domain):
            domain = domain['DomainName']
            if re.match(filter, domain):
                details = es_client.describe_elasticsearch_domain(
                    DomainName=domain)
                details = details['DomainStatus']
                arn = details['ARN']
                es = ElasticSearch(arn)
                es.endpoint = details.get('Endpoint', 'n/a')
                result.append(es)
                pool[arn] = es

        parallelize(handle, out['DomainNames'])
    except Exception:
        pass
    return result

def get_dynamo_dbs(filter=".*", pool=None, env=None, region=None): if pool is None: pool = {} result = [] try: dynamodb_client = _connect("dynamodb", env=env, region=region) out = dynamodb_client.list_tables() def handle(table): if re.match(filter, table): details = dynamodb_client.describe_table(TableName=table) details = details["Table"] arn = details["TableArn"] db = DynamoDB(arn) db.count = details["ItemCount"] db.bytes = details["TableSizeBytes"] db.created_at = details["CreationDateTime"] result.append(db) pool[arn] = db parallelize(handle, out["TableNames"]) except Exception: pass return result
def install_components(names):
    parallelize(install_component, names)
    install_amazon_kinesis_libs()
    install_lambda_java_libs()

def install_components(names):
    parallelize(install_component, names)
    install_lambda_java_libs()

def install_components(names):
    common.parallelize(install_component, names)

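# Hypothetical usage (the component names here are assumptions, not a
# confirmed list): install a set of backend components in parallel.
install_components(['kinesis', 'dynamodb', 'elasticsearch'])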