import boto3  # AWS SDK for Python
import mimetypes as mime  # assumption: "mime" in this file is the stdlib mimetypes module
import os

# Note: a (AWS helpers), config, const, f (file helpers), l (log), ls (list
# helpers), r (process runner) and util are project-local modules imported
# elsewhere; their exact import paths are not shown in this file.


def upload(params):
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    build_dir = os.path.join(
        proj_path, const.DIR_NAME_BUILD, target_name, const.DIR_NAME_DIST
    )

    version = util.get_version(params, config)
    force = ls.list_has_value(params["args"], "--force")

    dist_file_path = os.path.join(build_dir, const.FILE_NAME_DIST_PACKED)

    # AWS credentials come from the environment
    aws_key_id = os.getenv(const.AWS_KEY_ID_ENV)
    aws_secret_key = os.getenv(const.AWS_SECRET_KEY_ENV)

    aws_bucket_name = const.AWS_S3_BUCKET_NAME
    aws_bucket_path = "{0}/{1}/{2}/{3}".format(
        const.AWS_S3_BUCKET_PATH,
        target_name,
        version,
        const.FILE_NAME_DIST_PACKED,
    )

    a.s3_upload(
        file_path=dist_file_path,
        force=force,
        aws_bucket_name=aws_bucket_name,
        aws_bucket_key=aws_bucket_path,
        aws_key_id=aws_key_id,
        aws_secret_key=aws_secret_key,
    )
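
# Hedged sketch: the a.s3_upload helper above is project-local and not shown
# in this file. The function below is an illustrative stand-in (the name
# s3_upload_sketch is hypothetical), built only from documented boto3 and
# botocore APIs; the real helper may behave differently.
import botocore.exceptions


def s3_upload_sketch(
    file_path, force, aws_bucket_name, aws_bucket_key, aws_key_id, aws_secret_key
):
    client = boto3.client(
        "s3",
        aws_access_key_id=aws_key_id,
        aws_secret_access_key=aws_secret_key,
    )

    if not force:
        # refuse to overwrite an existing object unless --force was passed
        try:
            client.head_object(Bucket=aws_bucket_name, Key=aws_bucket_key)
            raise RuntimeError("Remote file already exists (use --force)")
        except botocore.exceptions.ClientError:
            # head_object failed, so the key does not exist yet
            pass

    client.upload_file(file_path, aws_bucket_name, aws_bucket_key)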
def run(params):
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    install_headers = target_config["install_headers"]

    param_dry_run = ls.list_has_value(params["args"], "--dry-run")
    if param_dry_run:
        l.i("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                l.i("Building for: {0}/{1}...".format(arch["conan_arch"], build_type))

                # conan build
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # in dry mode an existing build dir is reused instead of recreated
                clean_build_dir = True
                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    f.recreate_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    build_dir,
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                r.run(run_args, cwd=build_dir)

                # find the correct Info.plist file (flat or versioned framework layout)
                framework_dir = os.path.join(
                    build_dir,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                )

                plist_path1 = os.path.join(framework_dir, "Info.plist")
                plist_path2 = os.path.join(
                    framework_dir, "Versions", "A", "Resources", "Info.plist"
                )

                plist_path = ""

                if os.path.exists(plist_path1):
                    plist_path = plist_path1

                if os.path.exists(plist_path2):
                    plist_path = plist_path2

                # add minimum version inside plist
                r.run(
                    [
                        "plutil",
                        "-replace",
                        "MinimumOSVersion",
                        "-string",
                        arch["min_version"],
                        plist_path,
                    ],
                    cwd=proj_path,
                )

                # add supported platform inside plist
                r.run(
                    [
                        "plutil",
                        "-replace",
                        "CFBundleSupportedPlatforms",
                        "-json",
                        '[ "{0}" ]'.format(arch["supported_platform"]),
                        plist_path,
                    ],
                    cwd=proj_path,
                )

                # headers
                dist_headers_dir = os.path.join(framework_dir, "Headers")

                f.create_dir(dist_headers_dir)

                if install_headers:
                    for header in install_headers:
                        source_header_dir = os.path.join(proj_path, header["path"])

                        if header["type"] == "dir":
                            f.copy_dir(
                                source_header_dir,
                                dist_headers_dir,
                                ignore_file=_header_ignore_list,
                                symlinks=True,
                            )
                        else:
                            l.e(
                                "Invalid header type in install_headers list for {0}".format(
                                    target_name
                                )
                            )

                # modules
                support_modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    "support",
                    "modules",
                )

                modules_dir = os.path.join(framework_dir, "Modules")

                f.recreate_dir(modules_dir)

                f.copy_file(
                    os.path.join(support_modules_dir, "module.modulemap"),
                    os.path.join(modules_dir, "module.modulemap"),
                )

                # umbrella header: start from the support template and append
                # one import per header found inside the framework
                build_headers_dir = dist_headers_dir

                header_files = f.find_files(
                    build_headers_dir,
                    "*.h",
                    recursive=True,
                )

                content = f.get_file_contents(
                    os.path.join(support_modules_dir, "umbrella-header.h")
                )

                for header_file in header_files:
                    header_file = header_file.replace(build_headers_dir + "/", "")
                    content = content + '#import "{0}"\n'.format(header_file)

                if len(content) > 0:
                    umbrella_file = os.path.join(
                        build_headers_dir, target_config["umbrella_header"]
                    )

                    f.copy_file(
                        os.path.join(support_modules_dir, "umbrella-header.h"),
                        umbrella_file,
                    )

                    f.set_file_content(umbrella_file, content)
                else:
                    l.e("Umbrella header not generated because the framework headers are empty")

        l.ok()
    else:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
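
# Hedged sketch: the _header_ignore_list callback passed to f.copy_dir above is
# not defined in this file. Assuming f.copy_dir follows the shutil.copytree
# ignore convention (callable receives a directory and its entries, returns the
# names to skip), it could look like the illustration below; the real project
# helper may filter differently.
def _header_ignore_list_sketch(path, names):
    ignored = []

    for name in names:
        full_path = os.path.join(path, name)

        # keep directories (so recursion continues) and header files only
        if not os.path.isdir(full_path) and not name.endswith(".h"):
            ignored.append(name)

    return ignored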
def run(params):
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    no_framework = ls.list_has_value(params["args"], "--no-framework")
    no_xcframework = ls.list_has_value(params["args"], "--no-xcframework")

    # at least one of them needs to be generated
    if no_framework and no_xcframework:
        l.e("You need to generate the framework or the xcframework, but both are disabled")

    # remove dist folder for the target
    dist_dir = os.path.join(
        proj_path,
        const.DIR_NAME_DIST,
        target_name,
    )

    f.remove_dir(dist_dir)

    # generate framework
    if not no_framework:
        generate_framework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # generate xcframework
    if not no_xcframework:
        generate_xcframework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # add strip framework script (only required if the final project uses the
    # framework instead of the xcframework)
    l.i("Adding strip framework script...")

    target_scripts_dir = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "scripts",
    )

    f.copy_dir(
        target_scripts_dir,
        os.path.join(
            const.DIR_NAME_DIST,
            target_name,
            "scripts",
        ),
        symlinks=True,
    )

    # cocoapods
    l.i("Adding cocoapods script...")

    pod_file_path = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "cocoapods",
        "{0}.podspec".format(target_config["project_name"]),
    )

    target_pod_file_path = os.path.join(
        const.DIR_NAME_DIST,
        target_name,
        "{0}.podspec".format(target_config["project_name"]),
    )

    f.copy_file(
        pod_file_path,
        target_pod_file_path,
    )

    # replace the xcframework group dir placeholders in the podspec
    if not no_xcframework:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                xcframework_dir = os.path.join(
                    dist_dir,
                    build_type,
                    "{0}.xcframework".format(target_config["project_name"]),
                )

                found_dirs = f.find_dirs(xcframework_dir, "*")

                if found_dirs:
                    first_group = os.path.basename(found_dirs[0])

                    f.replace_in_file(
                        target_pod_file_path,
                        "{XCFRAMEWORK_" + build_type.upper() + "_GROUP_DIR}",
                        first_group,
                    )

    f.replace_in_file(target_pod_file_path, "{NAME}", target_config["project_name"])
    f.replace_in_file(target_pod_file_path, "{VERSION}", target_config["version"])

    # finish
    l.ok()
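
# Hedged sketch: generate_framework and generate_xcframework are defined
# elsewhere in the project. For reference, Apple's documented way to bundle
# per-platform frameworks into an xcframework is "xcodebuild
# -create-xcframework"; a minimal illustrative wrapper (hypothetical name and
# signature, not the project's actual function) could be:
import subprocess


def create_xcframework_sketch(framework_paths, output_path):
    args = ["xcodebuild", "-create-xcframework"]

    # each input framework contributes one slice of the resulting xcframework
    for framework_path in framework_paths:
        args.extend(["-framework", framework_path])

    args.extend(["-output", output_path])
    subprocess.run(args, check=True)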
def run(params):
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    param_dry_run = ls.list_has_value(params["args"], "--dry-run")
    if param_dry_run:
        l.i("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                l.i("Building for: {0}/{1}...".format(arch["conan_arch"], build_type))

                # conan build
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # in dry mode an existing build dir is reused instead of recreated
                clean_build_dir = True
                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    f.recreate_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    build_dir,
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                r.run(run_args, cwd=build_dir)

                # copy assets
                if "assets_dir" in target_config:
                    assets_dir = target_config["assets_dir"]
                    assets_dir = os.path.join(proj_path, assets_dir)

                    if os.path.isdir(assets_dir):
                        build_assets_dir = os.path.join(
                            build_dir, "bin", os.path.basename(assets_dir)
                        )

                        f.remove_dir(build_assets_dir)
                        f.copy_dir(assets_dir, build_assets_dir, symlinks=True)

        l.ok()
    else:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
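
# Illustrative shape of the target_config dictionary consumed by the build
# functions above. This is an assumption inferred from the keys accessed in
# this file; config.run produces the real data, and all values below are
# placeholders, not actual project settings.
EXAMPLE_TARGET_CONFIG = {
    "project_name": "MyLib",
    "version": "1.0.0",
    "umbrella_header": "MyLib.h",
    "build_types": ["Debug", "Release"],
    "assets_dir": "files/assets",
    "install_headers": [{"type": "dir", "path": "files/src/headers"}],
    "archs": [
        {
            "conan_arch": "x86_64",
            "group": "ios_simulator",
            "min_version": "12.0",
            "supported_platform": "iPhoneSimulator",
        },
    ],
}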
def docs_publish(params):
    proj_path = params["proj_path"]

    docs_name = ls.get_arg_list_value(params["args"], "--name")
    if not docs_name:
        docs_name = const.DOCS_DEFAULT_NAME

    docs_path = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_DOCS,
        docs_name,
    )

    output_path = os.path.join(
        proj_path,
        const.DIR_NAME_BUILD,
        const.DIR_NAME_BUILD_DOCS,
        docs_name,
    )

    ignore_files = [".DS_Store", "Thumbs.db"]

    config_data = config.run(proj_path, params)
    config_data = config_data[docs_name]

    has_tool = check_tool_mkdocs()

    if has_tool:
        params["target_name"] = "docs"

        # prepare data
        version = config_data["version"] if "version" in config_data else None
        append_version = (
            config_data["append_version"] if "append_version" in config_data else None
        )
        force = ls.list_has_value(params["args"], "--force")

        aws_key_id = os.getenv(const.AWS_KEY_ID_ENV)
        aws_secret_key = os.getenv(const.AWS_SECRET_KEY_ENV)
        aws_bucket_name = config_data["bucket_name"]
        aws_bucket_path = config_data["bucket_path"]

        if append_version:
            aws_bucket_path = "{0}/{1}".format(aws_bucket_path, version)

        # generate files
        run_args = [
            "mkdocs",
            "build",
            "--clean",
            "--config-file",
            "mkdocs.yml",
            "-d",
            output_path,
        ]

        r.run(run_args, cwd=docs_path)

        # version
        if append_version:
            if not version or len(version) == 0:
                l.e("You need to define the version (parameter: --version)")

            l.i("Version defined: {0}".format(version))

        # prepare to upload
        if not os.path.isdir(output_path):
            l.e("Documentation output folder does not exist: {0}".format(output_path))

        # prepare aws sdk
        l.i("Initializing AWS bucket and SDK...")

        if not aws_key_id or not aws_secret_key:
            l.failed("Your AWS credentials are invalid")

        s3_client = boto3.client(
            service_name="s3",
            aws_secret_access_key=aws_secret_key,
            aws_access_key_id=aws_key_id,
        )

        # check for an existing remote path
        l.i('Checking if remote path "{0}" exists on AWS...'.format(aws_bucket_path))

        has_remote_path = a.s3_path_exists(
            s3_client,
            aws_bucket_name,
            aws_bucket_path,
        )

        if has_remote_path:
            if force:
                l.i('The path "{0}" already exists on AWS, removing...'.format(
                    aws_bucket_path))

                a.s3_delete_path(
                    s3_client,
                    aws_bucket_name,
                    aws_bucket_path,
                )
            else:
                l.e('The path "{0}" already exists on AWS'.format(aws_bucket_path))

        # create path folder
        a.s3_create_path(
            s3_client,
            aws_bucket_name,
            aws_bucket_path,
        )

        # upload every generated file, preserving the relative layout
        walks = os.walk(output_path)

        for source, dirs, files in walks:
            l.i("Entering directory: {0}".format(source))

            for filename in files:
                if filename in ignore_files:
                    continue

                local_file_path = os.path.join(source, filename)
                relative_path = os.path.relpath(local_file_path, output_path)
                s3_file = os.path.join(aws_bucket_path, relative_path)

                l.i('Uploading file "{0}" to S3 bucket "{1}"...'.format(
                    relative_path, aws_bucket_name))

                extra_args = {"ACL": "public-read"}

                if os.path.isfile(local_file_path):
                    # guess_type returns a (type, encoding) tuple; type may be None
                    mime_type = mime.guess_type(local_file_path)[0]
                    extra_args["ContentType"] = mime_type if mime_type else ""

                s3_client.upload_file(
                    local_file_path,
                    aws_bucket_name,
                    s3_file,
                    ExtraArgs=extra_args,
                    Callback=a.ProgressPercentage(local_file_path),
                )

        if append_version:
            l.colored(
                "[DONE] You can access the documentation here: {0}/{1}/index.html".format(
                    config_data["url"],
                    version,
                ),
                l.BLUE,
            )
        else:
            l.colored(
                "[DONE] You can access the documentation here: {0}/index.html".format(
                    config_data["url"],
                ),
                l.BLUE,
            )

        l.ok()
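
# Hedged sketch: a.ProgressPercentage above is project-local. It most likely
# mirrors the upload-progress callback shown in the boto3 documentation; an
# equivalent self-contained version follows (an illustration, not necessarily
# the project's actual class).
import sys
import threading


class ProgressPercentageSketch:
    def __init__(self, filename):
        self._filename = filename
        self._size = float(os.path.getsize(filename))
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        # boto3 may call this from several threads during multipart uploads
        with self._lock:
            self._seen_so_far += bytes_amount
            percentage = (self._seen_so_far / self._size) * 100
            sys.stdout.write(
                "\r{0}  {1} / {2}  ({3:.2f}%)".format(
                    self._filename, self._seen_so_far, self._size, percentage
                )
            )
            sys.stdout.flush()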