def build_python_lambdas(bundle_name, project_path):
    """Assemble deployment packages for every python lambda in the project.

    Walks the project tree, and for each lambda config file found schedules
    :func:`_build_python_artifact` on a small thread pool. Blocks until all
    artifacts are built.

    :param bundle_name: name of the bundle the artifacts belong to
    :param project_path: project path relative to CONFIG.project_path
    """
    project_base_folder = os.path.basename(os.path.normpath(project_path))
    project_abs_path = build_path(CONFIG.project_path, project_path)
    _LOG.info('Going to process python project by path: {0}'.format(
        project_abs_path))
    target_folder = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                               bundle_name)
    _LOG.debug('Target directory: {0}'.format(target_folder))
    futures = []
    # 'with' guarantees the pool is shut down even if os.walk/submit raises
    with ThreadPoolExecutor(max_workers=5) as executor:
        for root, sub_dirs, files in os.walk(project_abs_path):
            for item in files:
                if item.endswith(LAMBDA_CONFIG_FILE_NAME):
                    _LOG.info('Going to build artifact in: {0}'.format(root))
                    arg = {
                        'item': item,
                        'project_base_folder': project_base_folder,
                        'project_path': project_path,
                        'root': root,
                        'target_folder': target_folder
                    }
                    # BUG FIX: _build_python_artifact takes named parameters,
                    # not a single dict; unpack the mapping as kwargs
                    futures.append(
                        executor.submit(_build_python_artifact, **arg))
        concurrent.futures.wait(futures, return_when=ALL_COMPLETED)
    _LOG.info('Python project was processed successfully')
def _install_local_req(artifact_path, local_req_path, project_base_folder,
                       project_path):
    """Copy project-local dependencies into the lambda artifact folder.

    Reads the local requirements file (one project-relative path per line),
    copies each referenced folder into the artifact, and then copies the
    loose ``*.py`` files of every package level leading to it so the python
    package hierarchy stays importable.
    """
    with open(local_req_path) as req_file:
        local_req_list = [path_resolver(line.strip())
                          for line in req_file.readlines()]
    _LOG.debug('Local dependencies: {0}'.format(prettify_json(local_req_list)))
    for lrp in local_req_list:
        _LOG.debug('Processing dependency: {0}'.format(lrp))
        folder_path = build_path(artifact_path, project_base_folder, lrp)
        if not os.path.exists(folder_path):
            os.makedirs(folder_path)
        dir_util.copy_tree(build_path(CONFIG.project_path, project_path, lrp),
                           folder_path)
        _LOG.debug('Dependency was copied successfully')
        folders = [part for part in lrp.split(DEFAULT_SEP) if part]
        # '' makes the first iteration process the python project root itself
        folders.insert(0, '')
        temp_path = ''
        for folder in folders:
            temp_path += DEFAULT_SEP + folder
            src_path = build_path(CONFIG.project_path, project_path,
                                  temp_path)
            dst_path = build_path(artifact_path, project_base_folder,
                                  temp_path)
            _copy_py_files(src_path, dst_path)
        _LOG.debug('Python files from packages were copied successfully')
def upload_bundle_to_s3(bundle_name, force):
    """Validate a built bundle and schedule upload of its files to S3.

    :param bundle_name: name of the bundle directory under ARTIFACTS_FOLDER
    :param force: when True, upload even if the bundle already exists
    :return: list of futures, one per scheduled upload
    :raises AssertionError: if the bundle already exists and force is False,
        or if validation finds missing deployment packages
    """
    if if_bundle_exist(bundle_name) and not force:
        raise AssertionError('Bundle name {0} already exists '
                             'in deploy bucket. Please use another bundle '
                             'name or delete the bundle'.format(bundle_name))
    bundle_path = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                             bundle_name)
    build_meta_path = build_path(bundle_path, BUILD_META_FILE_NAME)
    # FIX: close the meta file deterministically instead of leaking the
    # handle returned by the bare open() call
    with open(build_meta_path) as meta_file:
        meta_resources = json.load(meta_file)
    validate_deployment_packages(meta_resources)
    _LOG.info('Bundle was validated successfully')
    paths = []
    # NOTE(review): only bare file names are collected, so files nested in
    # sub-directories of the bundle lose their relative path — confirm that
    # bundle artifacts are flat before relying on this
    for root, dirs, file_names in os.walk(bundle_path):
        for file_name in file_names:
            paths.append(file_name)
    executor = ThreadPoolExecutor(max_workers=10)
    futures = []
    for path in paths:
        if 'output/' not in path:
            path_to_package = build_path(CONFIG.project_path,
                                         ARTIFACTS_FOLDER, bundle_name, path)
            _LOG.debug('Going to upload file: {0}'.format(path_to_package))
            # NOTE(review): _put_package_to_s3 is not visible here and
            # appears to accept a single dict argument — confirm signature
            arg = {
                'path': build_path(bundle_name, path),
                'path_to_package': path_to_package
            }
            futures.append(executor.submit(_put_package_to_s3, arg))
    # futures are returned pending; the caller is responsible for waiting
    return futures
def load_bundle(bundle_name, src_account_id, src_bucket_region,
                src_bucket_name, role_name):
    """Copy a bundle's artifacts from another account's S3 bucket.

    Assumes ``role_name`` in ``src_account_id`` via STS, lists the bundle's
    keys in ``src_bucket_name`` and schedules a download for each key into
    the local ARTIFACTS_FOLDER.

    :return: list of futures, one per scheduled download
    :raises AssertionError: if the deploy bucket or source bucket is missing,
        the role cannot be assumed, or a local bundle with files already
        exists under the same name
    """
    if not _S3_CONN.is_bucket_exists(CONFIG.deploy_target_bucket):
        raise AssertionError("Bundles bucket {0} does not exist "
                             " Please use 'create_deploy_target_bucket' to "
                             "create the bucket.".format(
                                 CONFIG.deploy_target_bucket))
    try:
        _LOG.debug(
            'Going to assume {0} role from {1} account'.format(
                role_name, src_account_id))
        # temporary credentials valid for 3600 seconds (one hour)
        credentials = sts.get_temp_credentials(role_name, src_account_id,
                                               3600)
        access_key = credentials['AccessKeyId']
        secret_key = credentials['SecretAccessKey']
        session_token = credentials['SessionToken']
        src_s3_conn = S3Connection(region=src_bucket_region,
                                   aws_access_key_id=access_key,
                                   aws_secret_access_key=secret_key,
                                   aws_session_token=session_token)
        _LOG.debug('Credentials were assumed successfully')
    except ClientError:
        raise AssertionError('Cannot assume {0} role. Please verify that '
                             'the role exists and has correct trusted '
                             'relationships to be assumed from {1}'
                             ' account.'.format(role_name, CONFIG.account_id))
    if not src_s3_conn.is_bucket_exists(src_bucket_name):
        raise AssertionError(
            "{0} account does not have a {1} bucket. Please verify that you "
            "have configured the correct bucket name.".format(
                src_account_id, src_bucket_name))
    _LOG.info('Going to find S3 keys for bundle: {0}'.format(bundle_name))
    objects = src_s3_conn.list_objects(bucket_name=src_bucket_name,
                                       prefix=bundle_name)
    artifacts_names = [meta['Key'] for meta in objects]
    _LOG.info('Found {0} artifacts: {1}'.format(len(artifacts_names),
                                                artifacts_names))
    bundle_path = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                             bundle_name)
    # refuse to overwrite an existing non-empty local bundle directory
    for dirpath, dirnames, files in os.walk(bundle_path):
        if files:
            raise AssertionError('Bundle name is already exists. '
                                 'Please, verify that have configured '
                                 'the correct bundle name.')
    # TODO create_pool can be used
    executor = ThreadPoolExecutor(max_workers=10)
    futures = []
    for key in artifacts_names:
        # NOTE(review): _download_package_from_s3 is not visible here and
        # appears to take a single dict argument — confirm its signature
        arg = {
            'conn': src_s3_conn,
            'bucket_name': src_bucket_name,
            'key': key,
            'path': build_path(CONFIG.project_path, ARTIFACTS_FOLDER, key)
        }
        futures.append(executor.submit(_download_package_from_s3, arg))
    return futures
def validate_deployment_packages(meta_resources):
    """Verify that every deployment package referenced by the meta exists.

    :param meta_resources: parsed build meta describing bundle resources
    :raises AssertionError: listing every package path that is missing
    """
    bundles_path = build_path(CONFIG.project_path, ARTIFACTS_FOLDER)
    candidate_paths = (build_path(bundles_path, package)
                       for package in artifact_paths(meta_resources))
    nonexistent_packages = [path for path in candidate_paths
                            if not os.path.exists(path)]
    if nonexistent_packages:
        raise AssertionError('Bundle is not properly configured.'
                             ' Nonexistent deployment packages: '
                             '{0}'.format(prettify_json(nonexistent_packages)))
def _build_node_artifact(item, root, target_folder):
    """Build a deployment package for a single NodeJS lambda.

    Installs npm dependencies (if a requirements file exists), zips the
    lambda folder into ``target_folder`` and cleans up temporary files.

    :param item: lambda config file name found in ``root``
    :param root: directory that contains the lambda sources
    :param target_folder: bundle directory that receives the package
    :return: human-readable success or failure message
    """
    _check_npm_is_installed()
    _LOG.debug('Building artifact in {0}'.format(target_folder))
    # FIX: close the config file deterministically instead of leaking the
    # handle returned by a bare open() inside json.load
    with open(build_path(root, item)) as config_file:
        lambda_config_dict = json.load(config_file)
    _LOG.debug('Root path: {}'.format(root))
    req_params = ['lambda_path', 'name', 'version']
    validate_params(root, lambda_config_dict, req_params)
    lambda_name = lambda_config_dict['name']
    lambda_version = lambda_config_dict['version']
    artifact_name = lambda_name + '-' + lambda_version
    # create folder to store artifacts
    artifact_path = build_path(target_folder, artifact_name)
    _LOG.debug('Artifacts path: {0}'.format(artifact_path))
    if not os.path.exists(artifact_path):
        os.makedirs(artifact_path)
    _LOG.debug('Folders are created')
    req_path = build_path(root, NODE_REQ_FILE_NAME)
    try:
        if os.path.exists(req_path):
            command = 'npm install --prefix {0}'.format(root)
            execute_command(command=command)
            _LOG.debug('3-rd party dependencies were installed successfully')
        package_name = build_py_package_name(lambda_name, lambda_version)
        zip_dir(root, build_path(target_folder, package_name))
        # FIX: removed the per-call RLock — a lock created locally on every
        # invocation is never shared between threads and synchronized nothing
        try:
            # remove unused folder/files
            node_modules_path = os.path.join(root, 'node_modules')
            if os.path.exists(node_modules_path):
                shutil.rmtree(node_modules_path)
            # todo Investigate deleting package_lock file
            # shutil.rmtree(os.path.join(root, 'package_lock.json'))
            shutil.rmtree(artifact_path)
        except FileNotFoundError:
            _LOG.exception('Error occurred while temp files removing.')
        return 'Lambda package {0} was created successfully'.format(
            package_name)
    except Exception:
        _LOG.exception(
            'Error occurred during the \'{0}\' lambda deployment package '
            'assembling'.format(lambda_name))
        return 'Error occurred during the \'{0}\' lambda deployment package ' \
               'assembling'.format(lambda_name)
def assemble_java_mvn_lambdas(project_path, bundles_dir):
    """Build a java maven project and collect its archives into bundles_dir.

    Runs ``mvn clean install`` in the project directory and copies every
    produced ``.jar``/``.war``/``.zip`` into the bundle directory.
    """
    from syndicate.core import CONFIG
    src_path = build_path(CONFIG.project_path, project_path)
    _LOG.info(
        'Going to process java mvn project by path: {0}'.format(src_path))
    execute_command_by_path(command='mvn clean install', path=src_path)
    # copy java artifacts to the target folder
    archive_suffixes = (".jar", ".war", ".zip")
    for current_dir, _, file_names in os.walk(src_path):
        for file_name in file_names:
            if file_name.endswith(archive_suffixes):
                shutil.copyfile(build_path(current_dir, file_name),
                                build_path(bundles_dir, file_name))
    _LOG.info('Java mvn project was processed successfully')
def create_meta(bundle_name):
    """Write the aggregated build meta file into the bundle directory.

    Collects meta info for every resource of the bundle and stores it as
    BUILD_META_FILE_NAME under the bundle's artifacts folder.
    """
    bundle_dir = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                            bundle_name)
    _LOG.info("Bundle path: {0}".format(bundle_dir))
    resources_meta = create_resource_json(bundle_name=bundle_name)
    write_content_to_file(bundle_dir, BUILD_META_FILE_NAME, resources_meta)
def _populate_s3_path_ebs(meta, bundle_name):
    """Set the S3 key for a beanstalk app based on its deployment_package.

    :raises AssertionError: if the config lacks 'deployment_package'
    """
    deployment_package = meta.get('deployment_package')
    # guard clause: fail fast on an incomplete configuration
    if not deployment_package:
        raise AssertionError('Beanstalk_app config must contain '
                             'deployment_package. Existing configuration'
                             ': {0}'.format(prettify_json(meta)))
    meta[S3_PATH_NAME] = build_path(bundle_name, deployment_package)
def _populate_s3_path_lambda_layer(meta, bundle_name):
    """Set the S3 key for a lambda layer based on its deployment_package.

    :raises AssertionError: if the config lacks 'deployment_package'
    """
    deployment_package = meta.get('deployment_package')
    # guard clause: fail fast on an incomplete configuration
    if not deployment_package:
        raise AssertionError('Lambda Layer config must contain deployment_package. '
                             'Existing configuration'
                             ': {0}'.format(prettify_json(meta)))
    meta[S3_PATH_NAME] = build_path(bundle_name, deployment_package)
def _populate_s3_path_python_node(meta, bundle_name):
    """Set the S3 key for a python/node lambda from its name and version.

    :raises AssertionError: if the config lacks 'name' or 'version'
    """
    name = meta.get('name')
    version = meta.get('version')
    # guard clause: fail fast on an incomplete configuration
    if not name or not version:
        raise AssertionError('Lambda config must contain name and version. '
                             'Existing configuration'
                             ': {0}'.format(prettify_json(meta)))
    meta[S3_PATH_NAME] = build_path(bundle_name,
                                    build_py_package_name(name, version))
def assemble_node_lambdas(bundle_name, project_path):
    """Assemble deployment packages for every NodeJS lambda in the project.

    Walks the project tree and schedules :func:`_build_node_artifact` for
    each lambda config file found; logs each build result as it completes.

    :param bundle_name: name of the bundle the artifacts belong to
    :param project_path: project path relative to CONFIG.project_path
    """
    target_folder = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                               bundle_name)
    project_abs_path = build_path(CONFIG.project_path, project_path)
    _LOG.info('Going to package lambdas starting by path {0}'.format(
        project_abs_path))
    futures = []
    # 'with' guarantees the pool is shut down even if a submission raises
    with ThreadPoolExecutor(max_workers=5) as executor:
        for root, sub_dirs, files in os.walk(project_abs_path):
            for item in files:
                if item.endswith(LAMBDA_CONFIG_FILE_NAME):
                    _LOG.info('Going to build artifact in: {0}'.format(root))
                    arg = {
                        'item': item,
                        'root': root,
                        'target_folder': target_folder
                    }
                    # BUG FIX: _build_node_artifact takes (item, root,
                    # target_folder), not a single dict; unpack as kwargs
                    futures.append(
                        executor.submit(_build_node_artifact, **arg))
        for future in concurrent.futures.as_completed(futures):
            _LOG.info(future.result())
def build_mvn_lambdas(bundle_name, project_path):
    """Build a java maven project and copy its archives into the bundle.

    Runs ``mvn clean install`` and copies every produced
    ``.jar``/``.war``/``.zip`` into the bundle's target folder, creating
    the folder first if needed.
    """
    src_path = build_path(CONFIG.project_path, project_path)
    _LOG.info(
        'Going to process java mvn project by path: {0}'.format(src_path))
    target_folder = build_path(CONFIG.project_path, ARTIFACTS_FOLDER,
                               bundle_name)
    if not os.path.exists(target_folder):
        os.makedirs(target_folder)
    _LOG.debug('Target directory: {0}'.format(target_folder))
    execute_command_by_path(command='mvn clean install', path=src_path)
    # copy java artifacts to the target folder
    archive_suffixes = (".jar", ".war", ".zip")
    for current_dir, _, file_names in os.walk(src_path):
        for file_name in file_names:
            if file_name.endswith(archive_suffixes):
                shutil.copyfile(build_path(current_dir, file_name),
                                build_path(target_folder, file_name))
    _LOG.info('Java mvn project was processed successfully')
def _build_python_artifact(item, project_base_folder, project_path, root,
                           target_folder):
    """Build a deployment package for a single python lambda.

    Installs third-party requirements into a temp artifact folder, copies
    local dependencies and the lambda's own ``*.py`` files, zips everything
    into ``target_folder`` and removes the temp folder.

    :param item: lambda config file name found in ``root``
    :param project_base_folder: base name of the project directory
    :param project_path: project path relative to CONFIG.project_path
    :param root: directory that contains the lambda sources
    :param target_folder: bundle directory that receives the package
    """
    _LOG.debug('Building artifact in {0}'.format(target_folder))
    # FIX: close the config file deterministically instead of leaking the
    # handle returned by a bare open() inside json.load
    with open(build_path(root, item)) as config_file:
        lambda_config_dict = json.load(config_file)
    req_params = ['lambda_path', 'name', 'version']
    validate_params(root, lambda_config_dict, req_params)
    lambda_path = path_resolver(lambda_config_dict['lambda_path'])
    lambda_name = lambda_config_dict['name']
    lambda_version = lambda_config_dict['version']
    artifact_name = lambda_name + '-' + lambda_version
    # create folder to store artifacts; intentionally raises if it already
    # exists — duplicate lambda name+version would silently collide otherwise
    artifact_path = build_path(target_folder, artifact_name)
    _LOG.debug('Artifacts path: {0}'.format(artifact_path))
    os.makedirs(artifact_path)
    _LOG.debug('Folders are created')
    # install requirements.txt content
    req_path = build_path(root, REQ_FILE_NAME)
    if os.path.exists(req_path):
        _LOG.debug('Going to install 3-rd party dependencies')
        with open(req_path) as f:
            req_list = [path_resolver(r.strip()) for r in f.readlines()]
        _LOG.debug(str(req_list))
        # install each dependency into the artifact folder
        for lib in req_list:
            command = 'pip install {0} -t {1}'.format(lib, artifact_path)
            execute_command(command=command)
        _LOG.debug('3-rd party dependencies were installed successfully')
    # install local requirements
    local_req_path = build_path(root, LOCAL_REQ_FILE_NAME)
    if os.path.exists(local_req_path):
        _LOG.debug('Going to install local dependencies')
        _install_local_req(artifact_path, local_req_path,
                           project_base_folder, project_path)
        _LOG.debug('Local dependencies were installed successfully')
    src_path = build_path(CONFIG.project_path, project_path, lambda_path)
    _copy_py_files(src_path, artifact_path)
    package_name = build_py_package_name(lambda_name, lambda_version)
    _zip_dir(artifact_path, build_path(target_folder, package_name))
    # FIX: removed the per-call RLock — a lock created locally on every
    # invocation is never shared between threads and synchronized nothing
    shutil.rmtree(artifact_path)
    _LOG.info('Package {0} was created successfully'.format(package_name))
def _copy_py_files(search_path, destination_path):
    """Copy the top-level ``*.py`` files from search_path to destination_path.

    Directories matching the glob pattern are skipped.
    """
    pattern = build_path(search_path, _PY_EXT)
    for candidate in glob.iglob(pattern):
        if not os.path.isfile(candidate):
            continue
        shutil.copy2(candidate, destination_path)
def load_meta_resources(bundle_name):
    """Download and parse the bundle's build meta from the deploy bucket.

    :param bundle_name: bundle whose BUILD_META_FILE_NAME key is fetched
    :return: the parsed meta as a dict
    """
    meta_key = build_path(bundle_name, BUILD_META_FILE_NAME)
    raw_meta = _S3_CONN.load_file_body(CONFIG.deploy_target_bucket, meta_key)
    return json.loads(raw_meta)
def load_meta_resources(bundle_name):
    """Download and parse the bundle's build meta from the deploy bucket.

    NOTE(review): this redefines ``load_meta_resources`` declared earlier in
    the file; at import time this later definition wins — confirm the
    duplication is intentional.
    """
    from syndicate.core import CONFIG, CONN
    meta_key = build_path(bundle_name, BUILD_META_FILE_NAME)
    raw_meta = CONN.s3().load_file_body(CONFIG.deploy_target_bucket,
                                        meta_key)
    return json.loads(raw_meta)
def resolve_bundle_directory(bundle_name):
    """Return the directory path of one concrete bundle's artifacts."""
    all_bundles_dir = resolve_all_bundles_directory()
    return build_path(all_bundles_dir, bundle_name)
def resolve_all_bundles_directory():
    """Return the root directory that holds every bundle's artifacts."""
    from syndicate.core import CONF_PATH
    artifacts_root = build_path(CONF_PATH, ARTIFACTS_FOLDER)
    return artifacts_root