def run(params):
    """Run "conan install" for every configured arch / build-type pair.

    Recreates the per-pair conan build folder and installs the target's
    conan recipe with the arch/build settings plus the ezored options.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    if not archs:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
        return

    # recipe path is the same for every pair - compute it once
    recipe_path = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_FILES_TARGET_CONAN,
        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
    )

    for arch in archs:
        for build_type in build_types:
            l.i("Building for: {0}/{1}...".format(arch["conan_arch"], build_type))

            # fresh conan install folder for this arch/build-type pair
            conan_dir = os.path.join(
                proj_path,
                const.DIR_NAME_BUILD,
                target_name,
                build_type,
                arch["conan_arch"],
                const.DIR_NAME_BUILD_CONAN,
            )
            f.recreate_dir(conan_dir)

            install_cmd = ["conan", "install", recipe_path]
            install_cmd += ["--profile", arch["conan_profile"]]
            install_cmd += ["-s", "arch={0}".format(arch["conan_arch"])]
            install_cmd += ["-s", "build_type={0}".format(build_type)]
            install_cmd += ["-o", "ezored_arch={0}".format(arch["conan_arch"])]
            install_cmd += ["-o", "ezored_name={0}".format(target_config["project_name"])]
            install_cmd += ["-o", "ezored_version={0}".format(target_config["version"])]
            install_cmd += ["--build=missing", "--update"]

            r.run(install_cmd, conan_dir)

    l.ok()
def check_python_formatter():
    """Return True when the "black" binary can be invoked, False otherwise.

    Fix: also catch CalledProcessError so an installed-but-failing binary
    reports "not available" instead of crashing the caller.
    """
    try:
        subprocess.check_output(["black", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        l.i("Black is not installed, check: https://github.com/psf/black")
        return False
def check_tool_mkdocs():
    """Return True when the "mkdocs" binary can be invoked, False otherwise.

    Fix: also catch CalledProcessError so an installed-but-failing binary
    reports "not available" instead of crashing the caller.
    """
    try:
        subprocess.check_output(["mkdocs", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        l.i("Mkdocs is not installed, check: https://www.mkdocs.org/")
        return False
def check_cpp_formatter():
    """Return True when the "clang-format" binary can be invoked, False otherwise.

    Fix: also catch CalledProcessError so an installed-but-failing binary
    reports "not available" instead of crashing the caller.
    """
    try:
        subprocess.check_output(["clang-format", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        l.i("Clang-format is not installed, check: https://clang.llvm.org/docs/ClangFormat.html")
        return False
def check_cmake_formatter():
    """Return True when the "cmake-format" binary can be invoked, False otherwise.

    Fix: also catch CalledProcessError so an installed-but-failing binary
    reports "not available" instead of crashing the caller.
    """
    try:
        subprocess.check_output(["cmake-format", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        l.i("Cmake-format is not installed, check: https://github.com/cheshirekow/cmake_format")
        return False
def run(params):
    """Remove build output, archives and editor/OS junk from the project tree."""
    l.i("Cleaning...")

    proj_path = params["proj_path"]

    # build output folder
    # NOTE(review): uses the literal "build" while other verbs use
    # const.DIR_NAME_BUILD - presumably the same value; confirm.
    f.remove_dir(os.path.join(proj_path, "build"))

    # junk files, removed recursively by pattern (same order as before)
    for pattern in ("*.pyc", "*.zip", "*.tar", "*.tar.gz", "Thumbs.db", ".DS_Store"):
        f.remove_files(proj_path, pattern, recursive=True)

    # python bytecode cache folders
    f.remove_dirs(proj_path, "__pycache__", recursive=True)

    l.ok()
def run(params):
    """Copy built binaries into the dist folder, per arch and build type."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    l.i("Packaging...")

    if not archs:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
        return

    for arch in archs:
        for build_type in build_types:
            l.i("Copying for: {0}/{1}...".format(arch["conan_arch"], build_type))

            # destination: dist/<target>/<build type>/<arch>, recreated empty
            out_dir = os.path.join(
                proj_path,
                const.DIR_NAME_DIST,
                target_name,
                build_type,
                arch["conan_arch"],
            )
            f.recreate_dir(out_dir)

            # source: the "bin" folder of the corresponding build tree
            bin_dir = os.path.join(
                proj_path,
                const.DIR_NAME_BUILD,
                target_name,
                build_type,
                arch["conan_arch"],
                const.DIR_NAME_BUILD_TARGET,
                "bin",
            )

            f.copy_all(bin_dir, out_dir)

    l.ok()
def generate(params):
    """Run the glue-code generator of every configured module.

    Each module's generator is imported dynamically from
    files.modules.<module>.gluecode.generate and its run() is invoked.
    """
    proj_path = params["proj_path"]

    # check modules folder
    modules_path = os.path.join(proj_path, const.DIR_NAME_FILES, const.DIR_NAME_FILES_MODULES)
    if not os.path.isdir(modules_path):
        l.e("Modules folder not exists: {0}".format(modules_path))

    # get gluecode modules
    gluecode_config = config.run(proj_path, None, params)
    modules = gluecode_config["modules"]

    if not modules:
        l.e("No modules to generate")
        return

    l.i("Generating files for all modules...")

    for module_name in modules:
        # modules without a gluecode folder are skipped silently
        if not os.path.isdir(os.path.join(modules_path, module_name, const.DIR_NAME_GLUECODE)):
            l.i('Module "{0}" was skipped'.format(module_name))
            continue

        l.i('Generating glue code files for "{0}"...'.format(module_name))

        func_path = "files.modules.{0}.gluecode.generate.run".format(module_name)
        mod_path, run_name = func_path.rsplit(".", 1)
        generator = getattr(importlib.import_module(mod_path), run_name)
        generator(params)

    l.ok()
def setup(params):
    """Download the djinni glue-code generator and make it executable."""
    proj_path = params["proj_path"]

    # resolve version, falling back to the bundled default
    version = ls.get_arg_list_value(params["args"], "--version")
    if not version or len(version) == 0:
        version = const.GLUECODE_TOOL_VERSION

    l.i("Glue code tool version: {0}".format(version))

    # start from an empty tool folder
    tool_dir = os.path.join(proj_path, const.DIR_NAME_BUILD, const.DIR_NAME_GLUECODE)
    f.recreate_dir(tool_dir)

    # pick the platform-specific release asset
    tool_file_path = gluecode.get_tool_path(params)

    if p.is_windows():
        file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni.bat".format(
            version
        )
    else:
        file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni".format(
            version
        )

    # download and add the executable bit for the owner
    try:
        n.download(file_url, tool_file_path)
        st = os.stat(tool_file_path)
        os.chmod(tool_file_path, st.st_mode | stat.S_IEXEC)
    except Exception as e:
        l.e("Error when download file {0}: {1}".format(file_url, e))

    l.ok()
def download_dist_file(
    proj_path, version, dist_file_path, dist_file_name, dist_folder, dist_file_url
):
    """Download a packed dist archive and unpack it into the dist folder.

    The archive is fetched from <dist_file_url>/<version>/<dist_file_name>
    and extracted to <proj_path>/dist/<dist_folder>.
    """
    # version is mandatory
    if not version or len(version) == 0:
        l.e("You need define version name (parameter: --version)")

    l.i("Version defined: {0}".format(version))

    # drop any previously downloaded archive
    l.i("Removing old file...")
    f.remove_file(dist_file_path)

    # fetch the archive
    l.i("Downloading {0} file...".format(dist_file_name))
    file_url = "{0}/{1}/{2}".format(dist_file_url, version, dist_file_name)

    try:
        f.create_dir(os.path.dirname(dist_file_path))
        n.download(file_url, dist_file_path)
    except Exception as e:
        l.e("Error when download file {0}: {1}".format(file_url, e))

    # replace the previously unpacked folder with the fresh content
    l.i("Removing old folder...")
    f.create_dir(os.path.join(proj_path, const.DIR_NAME_DIST))
    f.remove_dir(os.path.join(proj_path, const.DIR_NAME_DIST, dist_folder))

    l.i("Unpacking downloaded file...")
    pack.unpack(
        dist_file_path, os.path.join(proj_path, const.DIR_NAME_DIST, dist_folder)
    )

    l.ok()
def generate(proj_path, target_name, version, source_files):
    """Pack the given source files into the target's dist tarball."""
    # version is mandatory
    if not version or len(version) == 0:
        l.e("You need define version name (parameter: --version)")

    l.i("Version defined: {0}".format(version))

    # start from an empty build/<target>/dist folder
    build_dir = os.path.join(
        proj_path, const.DIR_NAME_BUILD, target_name, const.DIR_NAME_DIST
    )

    l.i("Removing old files...")
    f.recreate_dir(build_dir)

    # put everything into a single tar archive
    l.i("Packing {0} files...".format(len(source_files)))
    dist_file = os.path.join(build_dir, const.FILE_NAME_DIST_PACKED)
    p.tar_files(dist_file, source_files)

    l.ok()
def run(params):
    """Build an Android AAR library for every configured build type.

    For each build type: copies the AAR project template, injects version
    info into build.gradle, collects support-lib / glue-code / implementation
    Java sources and per-arch native libraries, runs the Gradle bundle task
    and copies the resulting AAR into the dist folder.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    # Gradle module name inside the template project.
    # NOTE(review): several paths below hard-code "library" instead of
    # reusing this variable - same value today, but keep them in sync.
    android_module_name = "library"

    l.i("Creating AAR library...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                l.i("Creating AAR library for: {0}...".format(build_type))

                build_dir = os.path.join(
                    proj_path, const.DIR_NAME_BUILD, target_name, build_type
                )

                # copy library project template into build/<...>/aar
                android_library_build_dir = os.path.join(build_dir, "aar")
                f.recreate_dir(android_library_build_dir)
                android_project_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    const.DIR_NAME_FILES_TARGET_SUPPORT,
                    "android-aar-project",
                )
                f.copy_dir(
                    android_project_dir,
                    android_library_build_dir,
                    symlinks=True,
                )

                # replace version placeholders in the template's build.gradle
                build_gradle_file = os.path.join(
                    android_library_build_dir,
                    "library",
                    "build.gradle",
                )
                f.replace_in_file(
                    build_gradle_file, "{VERSION}", target_config["version"]
                )
                f.replace_in_file(
                    build_gradle_file, "{VERSION_CODE}", target_config["version_code"]
                )

                # copy support lib java files into the module's source set
                gluecode_support_lib_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_MODULES,
                    "support-lib",
                )
                f.copy_all(
                    os.path.join(gluecode_support_lib_dir, "java"),
                    os.path.join(
                        android_library_build_dir,
                        android_module_name,
                        "src",
                        "main",
                        "java",
                    ),
                )

                # copy generated glue code java files of every module
                modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_MODULES,
                )
                modules = module.get_list(proj_path)

                for m in modules:
                    module_dir = os.path.join(
                        modules_dir,
                        m,
                        const.DIR_NAME_GLUECODE,
                        "generated-src",
                        "java",
                    )

                    if f.dir_exists(module_dir):
                        f.copy_all(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                android_module_name,
                                "src",
                                "main",
                                "java",
                            ),
                        )

                # copy implementation java files of every module
                modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_MODULES,
                )
                modules = module.get_list(proj_path)

                for m in modules:
                    module_dir = os.path.join(
                        modules_dir,
                        m,
                        "implementation",
                        "java",
                    )

                    if f.dir_exists(module_dir):
                        f.copy_all(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                android_module_name,
                                "src",
                                "main",
                                "java",
                            ),
                        )

                # copy compiled native libraries into jniLibs/<abi>
                for arch in archs:
                    compiled_arch_dir = os.path.join(
                        build_dir,
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                        "lib",
                    )
                    target_arch_dir = os.path.join(
                        android_library_build_dir,
                        "library",
                        "src",
                        "main",
                        "jniLibs",
                        arch["arch"],
                    )
                    f.copy_all(compiled_arch_dir, target_arch_dir)

                # build the AAR with Gradle (task name varies by build type,
                # e.g. bundleDebugAar / bundleReleaseAar)
                android_module_dir = os.path.join(
                    android_library_build_dir,
                    android_module_name,
                )

                if p.is_windows():
                    run_args = [
                        os.path.join("..", "gradlew.bat"),
                        "bundle{0}Aar".format(build_type),
                    ]
                else:
                    run_args = [
                        os.path.join("..", "gradlew"),
                        "bundle{0}Aar".format(build_type),
                    ]

                r.run(run_args, cwd=android_module_dir)

                # copy the produced AAR into dist/<target>/<build type>
                arr_dir = os.path.join(
                    android_library_build_dir,
                    android_module_name,
                    "build",
                    "outputs",
                    "aar",
                )
                dist_dir = os.path.join(
                    proj_path, const.DIR_NAME_DIST, target_name, build_type
                )
                f.remove_dir(dist_dir)
                f.copy_all(arr_dir, dist_dir)

            l.ok()
        else:
            l.i('Build type list for "{0}" is invalid or empty'.format(target_name))
    else:
        l.i('Arch list for "{0}" is invalid or empty'.format(target_name))
def generate_xcframework(proj_path, target_name, target_config, archs, build_types):
    """Create a .xcframework in dist for every build type.

    Archs are grouped by their "group" key; each group is merged into a
    single fat .framework with lipo, then all groups are combined with
    "xcodebuild -create-xcframework".
    """
    l.i("Packaging xcframework...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                l.i("Generating for: {0}...".format(build_type))

                # generate unique group list (in arch order)
                groups = []
                groups_command = []

                for arch in archs:
                    if not arch["group"] in groups:
                        groups.append(arch["group"])

                if len(groups) == 0:
                    l.e("Group list are empty, make sure you have defined group name for each arch in config file for this target"
                        )

                # generate framework for each group
                for group in groups:
                    # get base framework data for current group
                    # NOTE(review): no break here, so this keeps the LAST
                    # matching arch, not the first as the comment implies -
                    # harmless if all archs in a group share the framework
                    # layout; confirm.
                    base_framework_arch = None

                    for arch in archs:
                        if arch["group"] == group:
                            base_framework_arch = arch

                    if not base_framework_arch:
                        l.e("Group framework was not found: {0}".format(group))

                    # copy base framework as the skeleton of the fat one
                    framework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        base_framework_arch["group"],
                        base_framework_arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                        "lib",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    group_xcframework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        group,
                        "xcframework",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    f.remove_dir(group_xcframework_dir)

                    f.copy_all(
                        framework_dir,
                        group_xcframework_dir,
                    )

                    # merge the per-arch binaries of this group with lipo
                    lipo_archs_args = []

                    for arch in archs:
                        if arch["group"] == group:
                            lipo_archs_args.append(
                                os.path.join(
                                    proj_path,
                                    const.DIR_NAME_BUILD,
                                    target_name,
                                    build_type,
                                    arch["group"],
                                    arch["conan_arch"],
                                    const.DIR_NAME_BUILD_TARGET,
                                    "lib",
                                    "{0}.framework".format(
                                        target_config["project_name"]),
                                    target_config["project_name"],
                                ))

                    lipo_args = [
                        "lipo",
                        "-create",
                        "-output",
                    ]

                    # versioned (macOS-style) frameworks keep the binary under
                    # Versions/A, flat ones at the framework root
                    if f.dir_exists(
                            os.path.join(
                                group_xcframework_dir,
                                "Versions",
                            )):
                        lipo_args.extend([
                            os.path.join(
                                group_xcframework_dir,
                                "Versions",
                                "A",
                                target_config["project_name"],
                            ),
                        ])
                    else:
                        lipo_args.extend([
                            os.path.join(
                                group_xcframework_dir,
                                target_config["project_name"],
                            ),
                        ])

                    lipo_args.extend(lipo_archs_args)

                    r.run(lipo_args, proj_path)

                    # add final framework to the xcodebuild command line
                    groups_command.append("-framework")
                    groups_command.append(group_xcframework_dir)

                # generate xcframework from all group frameworks
                xcframework_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.xcframework".format(target_config["project_name"]),
                )

                f.remove_dir(xcframework_dir)

                xcodebuild_command = ["xcodebuild", "-create-xcframework"]
                xcodebuild_command += groups_command
                xcodebuild_command += ["-output", xcframework_dir]

                r.run(xcodebuild_command, proj_path)

                # check file (list the produced bundle for the log)
                l.i("Checking file for: {0}...".format(build_type))
                r.run(["ls", xcframework_dir], proj_path)
        else:
            l.i('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        l.i('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Package the iOS target: framework and/or xcframework plus podspec.

    Flags --no-framework / --no-xcframework skip the respective bundle
    (at least one must remain enabled). Also copies the strip script and
    fills in the cocoapods podspec placeholders.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    no_framework = ls.list_has_value(params["args"], "--no-framework")
    no_xcframework = ls.list_has_value(params["args"], "--no-xcframework")

    # at least one bundle kind needs to be generated
    if no_framework and no_xcframework:
        l.e("You need let generate framework or xcframework, but both are disabled"
            )

    # remove dist folder for the target
    dist_dir = os.path.join(
        proj_path,
        const.DIR_NAME_DIST,
        target_name,
    )
    f.remove_dir(dist_dir)

    # generate framework
    if not no_framework:
        generate_framework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # generate xcframework
    if not no_xcframework:
        generate_xcframework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # add strip framework script (only required if final project use
    # framework instead of xcframework)
    # NOTE(review): the paths below have no proj_path prefix, so they are
    # relative to the current working directory - presumably the project
    # root; confirm against the caller.
    l.i("Adding strip framework script...")

    target_scripts_dir = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "scripts",
    )

    f.copy_dir(
        target_scripts_dir,
        os.path.join(
            const.DIR_NAME_DIST,
            target_name,
            "scripts",
        ),
        symlinks=True,
    )

    # cocoapods: copy the podspec template next to the dist bundles
    l.i("Adding cocoapods script...")

    pod_file_path = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "cocoapods",
        "{0}.podspec".format(target_config["project_name"]),
    )

    target_pod_file_path = os.path.join(
        const.DIR_NAME_DIST,
        target_name,
        "{0}.podspec".format(target_config["project_name"]),
    )

    f.copy_file(
        pod_file_path,
        target_pod_file_path,
    )

    # fill the per-build-type xcframework group dir placeholder in the
    # podspec, using the first group folder found inside each bundle
    if not no_xcframework:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                xcframework_dir = os.path.join(
                    dist_dir,
                    build_type,
                    "{0}.xcframework".format(target_config["project_name"]),
                )

                found_dirs = f.find_dirs(xcframework_dir, "*")

                if found_dirs:
                    first_group = os.path.basename(found_dirs[0])

                    f.replace_in_file(
                        target_pod_file_path,
                        "{XCFRAMEWORK_" + build_type.upper() + "_GROUP_DIR}",
                        first_group,
                    )

    # fill name/version placeholders in the podspec
    f.replace_in_file(target_pod_file_path, "{NAME}",
                      target_config["project_name"])
    f.replace_in_file(target_pod_file_path, "{VERSION}",
                      target_config["version"])

    # finish
    l.ok()
def generate_framework(proj_path, target_name, target_config, archs, build_types):
    """Create a single fat .framework in dist for every build type.

    Uses the first arch's framework as the skeleton, strips
    CFBundleSupportedPlatforms from its Info.plist, then merges the
    binaries of all archs in valid groups with lipo.
    """
    l.i("Packaging framework...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                l.i("Copying for: {0}...".format(build_type))

                # copy first arch's framework folder as the base skeleton
                framework_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    archs[0]["group"],
                    archs[0]["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                )

                dist_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.framework".format(target_config["project_name"]),
                )

                f.remove_dir(dist_dir)

                f.copy_dir(
                    framework_dir,
                    dist_dir,
                    symlinks=True,
                )

                # update info plist file of the copied framework
                plist_path = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.framework".format(target_config["project_name"]),
                    "Info.plist",
                )

                if os.path.exists(plist_path):
                    # remove supported platforms inside plist (a fat
                    # framework spans several platforms)
                    r.run(
                        [
                            "plutil",
                            "-remove",
                            "CFBundleSupportedPlatforms",
                            plist_path,
                        ],
                        proj_path,
                    )

                # lipo: collect every arch binary from valid groups
                lipo_archs_args = []

                for arch in archs:
                    if is_valid_group(arch["group"]):
                        lipo_archs_args.append(
                            os.path.join(
                                proj_path,
                                const.DIR_NAME_BUILD,
                                target_name,
                                build_type,
                                arch["group"],
                                arch["conan_arch"],
                                const.DIR_NAME_BUILD_TARGET,
                                "lib",
                                "{0}.framework".format(
                                    target_config["project_name"]),
                                target_config["project_name"],
                            ))

                lipo_args = [
                    "lipo",
                    "-create",
                    "-output",
                ]

                # versioned (macOS-style) frameworks keep the binary under
                # Versions/A, flat ones at the framework root
                if f.dir_exists(os.path.join(
                        dist_dir,
                        "Versions",
                )):
                    lipo_args.extend([
                        os.path.join(
                            dist_dir,
                            "Versions",
                            "A",
                            target_config["project_name"],
                        ),
                    ])
                else:
                    lipo_args.extend([
                        os.path.join(
                            dist_dir,
                            target_config["project_name"],
                        ),
                    ])

                lipo_args.extend(lipo_archs_args)

                r.run(lipo_args, proj_path)

                # check file (log the binary's architecture info)
                l.i("Checking file for: {0}...".format(build_type))

                r.run(
                    [
                        "file",
                        os.path.join(dist_dir, target_config["project_name"])
                    ],
                    proj_path,
                )
        else:
            l.i('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        l.i('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Run "conan build" for every arch/build-type pair, then copy assets.

    With --dry-run an existing build tree is reused instead of recreated.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    dry_run = ls.list_has_value(params["args"], "--dry-run")
    if dry_run:
        l.i("Running in dry mode...")

    if not archs:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
        return

    # loop-invariant paths, computed once
    recipe_path = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_FILES_TARGET_CONAN,
        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
    )
    source_dir = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_FILES_TARGET_CMAKE,
    )

    for arch in archs:
        for build_type in build_types:
            l.i("Building for: {0}/{1}...".format(arch["conan_arch"], build_type))

            build_dir = os.path.join(
                proj_path,
                const.DIR_NAME_BUILD,
                target_name,
                build_type,
                arch["conan_arch"],
                const.DIR_NAME_BUILD_TARGET,
            )

            # keep an existing build tree when doing a dry run
            if not (dry_run and os.path.isdir(build_dir)):
                f.recreate_dir(build_dir)

            install_dir = os.path.join(
                proj_path,
                const.DIR_NAME_BUILD,
                target_name,
                build_type,
                arch["conan_arch"],
                const.DIR_NAME_BUILD_CONAN,
            )

            build_cmd = [
                "conan",
                "build",
                recipe_path,
                "--source-folder",
                source_dir,
                "--build-folder",
                build_dir,
                "--install-folder",
                install_dir,
            ]

            r.run(build_cmd, cwd=build_dir)

            # copy assets next to the produced binaries, when configured
            if "assets_dir" in target_config:
                assets_dir = os.path.join(proj_path, target_config["assets_dir"])

                if os.path.isdir(assets_dir):
                    build_assets_dir = os.path.join(
                        build_dir, "bin", os.path.basename(assets_dir)
                    )
                    f.remove_dir(build_assets_dir)
                    f.copy_dir(assets_dir, build_assets_dir, symlinks=True)

    l.ok()
def setup(params):
    """Set up conan: default profile, target profiles, darwin toolchain remote."""
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)

    # create default profile (detect + overwrite)
    l.i("Creating default profile...")
    r.run(
        ["conan", "profile", "new", "default", "--detect", "--force"],
        cwd=os.getcwd(),
    )

    # copy every target's conan profile files into the user's conan home
    l.i("Copying files...")

    if targets:
        for target_item in targets:
            profile_src_dir = os.path.join(
                proj_path,
                const.DIR_NAME_FILES,
                const.DIR_NAME_FILES_TARGETS,
                target_item,
                const.DIR_NAME_FILES_TARGET_CONAN,
                const.DIR_NAME_FILES_TARGET_CONAN_PROFILE,
            )
            profile_files = f.find_files(profile_src_dir, "*profile")

            if not profile_files:
                continue

            conan_profile_dir = os.path.join(
                f.home_dir(),
                const.DIR_NAME_HOME_CONAN,
                const.DIR_NAME_HOME_CONAN_PROFILES,
            )

            for profile_file in profile_files:
                filename = os.path.basename(profile_file)
                l.i('Copying profile "{0}"...'.format(filename))
                f.copy_file(profile_file, os.path.join(conan_profile_dir, filename))

    # add darwin toolchain remote (overwrite when it already exists)
    l.i("Adding darwin toolchain repository...")
    r.run(
        [
            "conan",
            "remote",
            "add",
            "darwin-toolchain",
            "https://ezoredrepository.jfrog.io/artifactory/api/conan/conan-local",
            "--force",
        ],
        cwd=os.getcwd(),
    )

    l.ok()
def docs_publish(params):
    """Build the mkdocs site and upload it to an S3 bucket.

    Reads the docs config (bucket, url, version), builds with mkdocs, then
    uploads every generated file with public-read ACL. --force replaces an
    existing remote path; --name selects the docs set.
    """
    proj_path = params["proj_path"]

    # docs set to publish (defaults when --name not passed)
    docs_name = ls.get_arg_list_value(params["args"], "--name")

    if not docs_name:
        docs_name = const.DOCS_DEFAULT_NAME

    docs_path = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_DOCS,
        docs_name,
    )

    output_path = os.path.join(
        proj_path,
        const.DIR_NAME_BUILD,
        const.DIR_NAME_BUILD_DOCS,
        docs_name,
    )

    # OS junk files that must never be uploaded
    ignore_files = [".DS_Store", "Thumbs.db"]

    config_data = config.run(proj_path, params)
    config_data = config_data[docs_name]

    has_tool = check_tool_mkdocs()

    if has_tool:
        params["target_name"] = "docs"

        # prepare data
        version = config_data["version"] if "version" in config_data else None
        append_version = (config_data["append_version"]
                          if "append_version" in config_data else None)
        force = ls.list_has_value(params["args"], "--force")
        aws_key_id = os.getenv(const.AWS_KEY_ID_ENV)
        aws_secret_key = os.getenv(const.AWS_SECRET_KEY_ENV)
        aws_bucket_name = config_data["bucket_name"]
        aws_bucket_path = "{0}".format(config_data["bucket_path"])

        # when versioned, publish into a per-version subfolder
        if append_version:
            aws_bucket_path = "{0}/{1}".format(aws_bucket_path, version)

        # generate site files with mkdocs
        run_args = [
            "mkdocs",
            "build",
            "--clean",
            "--config-file",
            "mkdocs.yml",
            "-d",
            output_path,
        ]

        r.run(run_args, cwd=docs_path)

        # a version is mandatory when append_version is set
        if append_version:
            if not version or len(version) == 0:
                l.e("You need define version name (parameter: --version)")

            l.i("Version defined: {0}".format(version))

        # prepare to upload
        if not os.path.isdir(docs_path):
            l.e("Documentation output folder not exists: {0}".format(
                docs_path))

        # prepare aws sdk
        l.i("Initializing AWS bucket and SDK...")

        if not aws_key_id or not aws_secret_key:
            l.failed("Your AWS credentials are invalid")

        s3_client = boto3.client(
            service_name="s3",
            aws_secret_access_key=aws_secret_key,
            aws_access_key_id=aws_key_id,
        )

        # checking for existing path
        l.i('Checking if remote path "{0}" exists on AWS...'.format(
            aws_bucket_path, ))

        has_remote_path = a.s3_path_exists(
            s3_client,
            aws_bucket_name,
            aws_bucket_path,
        )

        # an existing remote path is only replaced with --force
        if has_remote_path:
            if force:
                l.i('The path "{0}" already exists on AWS, removing...'.format(
                    aws_bucket_path))

                a.s3_delete_path(
                    s3_client,
                    aws_bucket_name,
                    aws_bucket_path,
                )
            else:
                l.e('The path "{0}" already exists on AWS'.format(
                    aws_bucket_path))

        # create path folder
        a.s3_create_path(
            s3_client,
            aws_bucket_name,
            aws_bucket_path,
        )

        # upload every generated file, keeping the relative layout
        walks = os.walk(output_path)

        for source, dirs, files in walks:
            l.i("Entering directory: {0}".format(source))

            for filename in files:
                if filename in ignore_files:
                    continue

                local_file_path = os.path.join(source, filename)
                relative_path = os.path.relpath(local_file_path, output_path)
                s3_file = os.path.join(aws_bucket_path, relative_path)

                l.i('Uploading file "{0}" to S3 bucket "{1}"...'.format(
                    relative_path, aws_bucket_name))

                extra_args = {}

                # guess a content type for files; dirs get ACL only
                # NOTE(review): "mime_type != None" should idiomatically be
                # "mime_type is not None" (guess_type always returns a
                # 2-tuple, so the length check is defensive)
                if os.path.isdir(local_file_path):
                    extra_args = {
                        "ACL": "public-read",
                    }
                elif os.path.isfile(local_file_path):
                    mime_type = mime.guess_type(local_file_path)

                    extra_args = {
                        "ACL":
                        "public-read",
                        "ContentType": (mime_type[0] if mime_type != None
                                        and len(mime_type) > 0
                                        and mime_type[0] != None else ""),
                    }

                s3_client.upload_file(
                    local_file_path,
                    aws_bucket_name,
                    s3_file,
                    ExtraArgs=extra_args,
                    Callback=a.ProgressPercentage(local_file_path),
                )

        # print the final documentation URL
        if append_version:
            l.colored(
                "[DONE] You can access documentation here: {0}/{1}/index.html".
                format(
                    config_data["url"],
                    version,
                ),
                l.BLUE,
            )
        else:
            l.colored(
                "[DONE] You can access documentation here: {0}/index.html".
                format(config_data["url"], ),
                l.BLUE,
            )

        l.ok()
def run(params):
    """Run "conan build" for the iOS target and post-process each framework.

    For every arch/build-type pair: builds with conan, patches the
    framework's Info.plist (min OS version, supported platform), installs
    public headers, the modulemap and a generated umbrella header.
    With --dry-run an existing build tree is reused instead of recreated.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    install_headers = target_config["install_headers"]

    param_dry_run = ls.list_has_value(params["args"], "--dry-run")

    if param_dry_run:
        l.i("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                l.i("Building for: {0}/{1}...".format(arch["conan_arch"],
                                                      build_type))

                # conan build (build tree is keyed by group AND arch here)
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # keep an existing tree when doing a dry run
                clean_build_dir = True

                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    f.recreate_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                    ),
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                r.run(run_args, build_dir)

                # find correct info plist file: flat framework layout vs
                # versioned (Versions/A/Resources) layout
                plist_path1 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Info.plist",
                )

                plist_path2 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Versions",
                    "A",
                    "Resources",
                    "Info.plist",
                )

                # NOTE(review): when neither path exists, plist_path stays ""
                # and the plutil calls below will fail
                plist_path = ""

                if os.path.exists(plist_path1):
                    plist_path = plist_path1

                if os.path.exists(plist_path2):
                    plist_path = plist_path2

                # add minimum version inside plist
                r.run(
                    [
                        "plutil",
                        "-replace",
                        "MinimumOSVersion",
                        "-string",
                        arch["min_version"],
                        plist_path,
                    ],
                    proj_path,
                )

                # add supported platform inside plist
                r.run(
                    [
                        "plutil",
                        "-replace",
                        "CFBundleSupportedPlatforms",
                        "-json",
                        '[ "{0}" ]'.format(arch["supported_platform"]),
                        plist_path,
                    ],
                    proj_path,
                )

                # headers: copy configured public headers into the framework
                dist_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                f.create_dir(dist_headers_dir)

                if install_headers:
                    for header in install_headers:
                        source_header_dir = os.path.join(
                            proj_path, header["path"])

                        # only "dir" entries are supported for now
                        if header["type"] == "dir":
                            f.copy_dir(
                                source_header_dir,
                                dist_headers_dir,
                                ignore_file=_header_ignore_list,
                                symlinks=True,
                            )
                        else:
                            l.e("Invalid type for install header list for {0}".
                                format(target_name))

                # modules: install the clang module map
                support_modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    "support",
                    "modules",
                )

                modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Modules",
                )

                f.recreate_dir(modules_dir)

                f.copy_file(
                    os.path.join(support_modules_dir, "module.modulemap"),
                    os.path.join(modules_dir, "module.modulemap"),
                )

                # umbrella header: template + one #import per framework header
                build_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                header_files = f.find_files(
                    build_headers_dir,
                    "*.h",
                    recursive=True,
                )

                content = f.get_file_contents(
                    os.path.join(support_modules_dir, "umbrella-header.h"))

                for header_file in header_files:
                    # imports are relative to the Headers folder
                    header_file = header_file.replace(build_headers_dir + "/",
                                                      "")
                    content = content + '#import "{0}"\n'.format(header_file)

                if len(content) > 0:
                    umbrella_file = os.path.join(
                        build_headers_dir, target_config["umbrella_header"])

                    f.copy_file(
                        os.path.join(support_modules_dir, "umbrella-header.h"),
                        umbrella_file,
                    )

                    f.set_file_content(umbrella_file, content)
                else:
                    l.e("{0}".format(
                        "File not generated because framework headers is empty"
                    ))

        l.ok()
    else:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
def code_format(params):
    """Format C++, Python and CMake sources in place.

    Runs clang-format, black and cmake-format (when available) over
    fixed path/pattern tables, including per-target CMake folders.
    """
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)

    # format c++ files (requires clang-format on PATH)
    has_tool = check_cpp_formatter()

    if has_tool:
        path_list = [
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_FILES,
                             const.DIR_NAME_FILES_MODULES),
                "patterns": ["*.cpp", "*.hpp", "*.c", "*.h", "*.m", "*.mm"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_PROJECTS, "others"),
                "patterns": ["*.cpp", "*.hpp", "*.c", "*.h", "*.m", "*.mm"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_PROJECTS, "android"),
                "patterns": ["*.cpp", "*.hpp", "*.c", "*.h", "*.m", "*.mm"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_PROJECTS, "ios",
                             "Sample", "Sample"),
                "patterns": ["*.cpp", "*.hpp", "*.c", "*.h", "*.m", "*.mm"],
            },
        ]

        if path_list:
            l.i("Formating C++ files...")

            for path_list_item in path_list:
                patterns = path_list_item["patterns"]

                for pattern_item in patterns:
                    files = f.find_files(path_list_item["path"],
                                         pattern_item,
                                         recursive=True)

                    for file_item in files:
                        l.i("Formatting file: {0}...".format(
                            os.path.relpath(file_item)))

                        # "-style file" reads the repo's .clang-format
                        run_args = [
                            "clang-format", "-style", "file", "-i", file_item
                        ]

                        r.run(run_args, cwd=proj_path)

            l.ok()
        else:
            l.e("No C++ files found to format")

    # format python files (requires black on PATH)
    has_tool = check_python_formatter()

    if has_tool:
        # entries may be a single file (no "patterns") or a dir + patterns
        path_list = [
            {
                "path": os.path.join(proj_path, "make.py"),
            },
            {
                "path": os.path.join(proj_path, const.DIR_NAME_FILES),
                "patterns": ["*.py"],
            },
        ]

        if path_list:
            l.i("Formating Python files...")

            for path_list_item in path_list:
                patterns = (path_list_item["patterns"]
                            if "patterns" in path_list_item else None)

                if patterns:
                    for pattern_item in patterns:
                        files = f.find_files(path_list_item["path"],
                                             pattern_item,
                                             recursive=True)

                        for file_item in files:
                            l.i("Formatting file: {0}...".format(
                                os.path.relpath(file_item)))

                            run_args = ["black", "-q", file_item]

                            r.run(run_args, cwd=proj_path)
                else:
                    # single-file entry
                    file_item = (path_list_item["path"]
                                 if "path" in path_list_item else None)

                    if file_item:
                        l.i("Formatting file: {0}...".format(
                            os.path.relpath(file_item)))

                        run_args = ["black", "-q", file_item]

                        r.run(run_args, cwd=proj_path)

            l.ok()
        else:
            l.e("No Python files found to format")

    # format cmake files (requires cmake-format on PATH)
    has_tool = check_cmake_formatter()

    if has_tool:
        path_list = [
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_FILES,
                             const.DIR_NAME_FILES_MODULES),
                "patterns": ["*.cmake"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_FILES,
                             const.DIR_NAME_FILES_MODULES),
                "patterns": ["CMakeLists.txt"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_FILES,
                             const.DIR_NAME_FILES_COMMON),
                "patterns": ["*.cmake"],
            },
            {
                "path":
                os.path.join(proj_path, const.DIR_NAME_FILES,
                             const.DIR_NAME_FILES_COMMON),
                "patterns": ["CMakeLists.txt"],
            },
        ]

        # also cover each target's cmake/conan/support/verbs folders
        for target_name in targets:
            path_list.extend([
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "patterns": ["*.cmake"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "patterns": ["CMakeLists.txt"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                    ),
                    "patterns": ["*.cmake"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                    ),
                    "patterns": ["CMakeLists.txt"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_SUPPORT,
                    ),
                    "patterns": ["*.cmake"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_SUPPORT,
                    ),
                    "patterns": ["CMakeLists.txt"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    ),
                    "patterns": ["*.cmake"],
                },
                {
                    "path":
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    ),
                    "patterns": ["CMakeLists.txt"],
                },
            ])

        if path_list:
            l.i("Formating CMake files...")

            for path_list_item in path_list:
                patterns = path_list_item["patterns"]

                for pattern_item in patterns:
                    files = f.find_files(path_list_item["path"],
                                         pattern_item,
                                         recursive=True)

                    for file_item in files:
                        l.i("Formatting file: {0}...".format(
                            os.path.relpath(file_item)))

                        # "-c .cmake-format" reads the repo's config file
                        run_args = [
                            "cmake-format",
                            "-c",
                            ".cmake-format",
                            "-i",
                            file_item,
                        ]

                        r.run(run_args, cwd=proj_path)

            l.ok()
        else:
            l.e("No CMake files found to format")
def run(params):
    """Dispatch a target verb.

    Expects params["args"] to start with [target_name, verb_name, ...].
    Runs the matching verb module for the target via r.run_external;
    when the target or verb is missing/unknown, prints the list of
    available target verbs or the general help instead.

    :param params: command params dict; reads "args" and "proj_path",
        and sets "target_name" before delegating to the verb module.
    """
    args = params["args"]
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)

    show_target_list = False

    if args:
        # first positional argument selects the target
        target_item = args.pop(0)

        if target_item in targets:
            target_verbs = target.get_all_target_verbs(proj_path, target_item)
            # hide internal verbs from the user-facing list
            target_verbs = list(
                ls.filter_list(target_verbs, const.TARGET_VERBS_INTERNAL)
            )

            show_target_verb_list = False

            if args:
                # second positional argument selects the verb
                verb_name = args[0]

                if verb_name in target_verbs:
                    l.i('Running "{0}" on target "{1}"...'.format(
                        verb_name, target_item))

                    target_verb_folder = os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_item,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    )

                    params["target_name"] = target_item

                    r.run_external(
                        path=target_verb_folder,
                        module_name=verb_name,
                        command_name="run",
                        command_params=params,
                        show_log=False,
                        show_error_log=True,
                        throw_error=True,
                    )
                else:
                    show_target_verb_list = True
            else:
                show_target_verb_list = True

            if show_target_verb_list:
                if target_verbs:
                    l.colored("List of available target verbs:\n", l.MAGENTA)

                    for target_verb in target_verbs:
                        l.m(" - {0}".format(target_verb))
                else:
                    l.e("No target verbs available")
        else:
            show_target_list = True
    else:
        show_target_list = True

    if show_target_list:
        show_help(params)
def run(params):
    """Run "conan install" for every configured arch/build-type pair.

    Loads the target configuration, then for each (arch, build_type)
    combination recreates the conan build directory and invokes conan
    with build/host profiles, host settings and ezored options derived
    from the arch entry. Optional arch keys (enable_bitcode, enable_arc,
    enable_visibility, subsystem_ios_version) add extra darwin-toolchain
    options/settings; "min_version" is read unconditionally and is
    presumably a required arch field — TODO confirm against config.

    :param params: command params dict; reads "proj_path" and "target_name".
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    # guard clause: nothing to build without a valid arch list
    if not archs:
        l.e('Arch list for "{0}" is invalid or empty'.format(target_name))
        return

    # loop-invariant: conanfile recipe path for this target
    recipe_path = os.path.join(
        proj_path,
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_FILES_TARGET_CONAN,
        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
    )

    for arch in archs:
        for build_type in build_types:
            l.i("Building for: {0}/{1}...".format(
                arch["conan_arch"], build_type))

            # fresh conan install directory per arch/build-type
            build_dir = os.path.join(
                proj_path,
                const.DIR_NAME_BUILD,
                target_name,
                build_type,
                arch["group"],
                arch["conan_arch"],
                const.DIR_NAME_BUILD_CONAN,
            )

            f.recreate_dir(build_dir)

            run_args = [
                "conan",
                "install",
                recipe_path,
                "-pr:b",
                target.get_build_profile(),
                "-pr:h",
                arch["conan_profile"],
                "-s:h",
                "arch={0}".format(arch["conan_arch"]),
                "-s:h",
                "build_type={0}".format(build_type),
                "-s:h",
                "os.version={0}".format(arch["min_version"]),
                "-o",
                "ezored_arch={0}".format(arch["conan_arch"]),
                "-o",
                "ezored_name={0}".format(target_config["project_name"]),
                "-o",
                "ezored_version={0}".format(target_config["version"]),
                "-o",
                "ezored_group={0}".format(arch["group"]),
            ]

            # optional darwin-toolchain options/settings per arch
            if "enable_bitcode" in arch:
                run_args.extend([
                    "-o:h",
                    "darwin-toolchain:enable_bitcode={0}".format(
                        arch["enable_bitcode"]),
                ])

            if "enable_arc" in arch:
                run_args.extend([
                    "-o:h",
                    "darwin-toolchain:enable_arc={0}".format(
                        arch["enable_arc"]),
                ])

            if "enable_visibility" in arch:
                run_args.extend([
                    "-o:h",
                    "darwin-toolchain:enable_visibility={0}".format(
                        arch["enable_visibility"]),
                ])

            if "subsystem_ios_version" in arch:
                run_args.extend([
                    "-s:h",
                    "os.subsystem.ios_version={0}".format(
                        arch["subsystem_ios_version"]),
                ])

            # always resolve missing binaries and refresh remote data
            run_args.extend(["--build=missing", "--update"])

            r.run(run_args, cwd=build_dir)

            # NOTE(review): collapsed source is ambiguous about nesting;
            # l.ok() placed per-iteration to pair with l.i above — confirm
            l.ok()