def generate(params):
    """Generate glue code files for every configured module."""
    proj_path = params["proj_path"]

    # the glue code modules folder must exist before anything can run
    gluecode_modules_path = os.path.join(
        proj_path, const.DIR_NAME_FILES, const.DIR_NAME_GLUECODE
    )

    if not os.path.isdir(gluecode_modules_path):
        log.error(
            "Glue code modules folder not exists: {0}".format(gluecode_modules_path)
        )

    # resolve the configured module list
    gluecode_config = config.run(proj_path, None, params)
    modules = gluecode_config["modules"]

    if not modules:
        log.error("No glue code modules to generate")
    else:
        log.info("Generating files for all modules...")

        for module in modules:
            log.info('Generating glue code files for "{0}"...'.format(module))

            # import "files.gluecode.<module>.generate" and call its "run"
            func_path = "files.gluecode.{0}.generate.run".format(module)
            mod_name, func_name = func_path.rsplit(".", 1)
            generator_module = importlib.import_module(mod_name)
            generator_func = getattr(generator_module, func_name)
            generator_func(params)

            log.ok()
def generate(params):
    """Run the glue code tool for the single module described by params["module_data"]."""
    proj_path = params["proj_path"]
    module_data = params["module_data"]

    if not module_data:
        log.error("Module data is invalid")

    # the external glue code tool must exist
    gluecode_tool_path = get_tool_path(params)

    if not os.path.isfile(gluecode_tool_path):
        log.error("Glue code tool was not found: {0}".format(gluecode_tool_path))

    # module data
    module_name = module_data["name"]
    tool_params = module_data["tool_params"]

    module_dir = os.path.join(
        proj_path, const.DIR_NAME_FILES, const.DIR_NAME_GLUECODE, module_name
    )

    # drop previously generated output before regenerating
    for old_dir in ("generated-src", "yaml"):
        file.remove_dir(os.path.join(module_dir, old_dir))

    # run the tool as one shell command from inside the module folder
    command = [gluecode_tool_path]
    command.extend(tool_params)

    runner.run_as_shell(
        args=" ".join(command),
        cwd=module_dir,
    )
def version(params):
    """Print the installed glue code tool version."""
    binary_path = gluecode.get_tool_path(params)

    if not os.path.isfile(binary_path):
        log.error("Glue code tool was not found: {0}".format(binary_path))

    # ask the tool itself for its version, run from the current working dir
    runner.run_as_shell("{0} --version".format(binary_path), cwd=os.getcwd())
def download(proj_path, version, dist_file_path, dist_file_name, dist_folder, aws_s3_url):
    """Download a packed distribution from S3 and unpack it into the dist folder."""
    # a version is mandatory
    if not version or len(version) == 0:
        log.error("You need define version name (parameter: --version)")

    log.info("Version defined: {0}".format(version))

    # drop any previously downloaded archive
    log.info("Removing old file...")
    file.remove_file(dist_file_path)

    # fetch the archive from <aws_s3_url>/<version>/<file name>
    log.info("Downloading {0} file...".format(dist_file_name))
    file_url = "{0}/{1}/{2}".format(aws_s3_url, version, dist_file_name)

    try:
        net.download(file_url, dist_file_path)
    except Exception as e:
        log.error("Error when download file {0}: {1}".format(file_url, e))

    # replace the old unpacked folder with the fresh archive content
    log.info("Removing old folder...")
    dist_root = os.path.join(proj_path, const.DIR_NAME_DIST)
    file.create_dir(dist_root)
    file.remove_dir(os.path.join(dist_root, dist_folder))

    log.info("Unpacking downloaded file...")
    pack.unpack(dist_file_path, os.path.join(dist_root, dist_folder))

    log.ok("")
def run(args, cwd):
    """Run *args* (a list of command tokens) with subprocess inside *cwd*.

    On failure the command line and working directory are printed and
    log.error is called to report the failure.

    :param args: command and arguments as a list (shell is NOT used)
    :param cwd: working directory for the child process
    """
    ret = subprocess.call(args, cwd=cwd)

    # FIX: a process killed by a signal returns a NEGATIVE code on POSIX,
    # so the old "ret > 0" check silently ignored those failures —
    # any non-zero return code is a failure.
    if ret != 0:
        log.normal("{2}COMMAND:{3} {0}\n"
                   "{4}WORKING DIR:{5} {1}".format(" ".join(args), cwd, log.YELLOW,
                                                   log.ENDC, log.YELLOW, log.ENDC))
        log.error("Command execution has failed")
def show_help(params):
    """List every available target for the project."""
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)

    if not targets:
        log.error("No targets available")
    else:
        log.colored("List of available targets:\n", log.PURPLE)

        for name in targets:
            log.normal(" - {0}".format(name))
def run_as_shell(args, cwd):
    """Run *args* through the shell inside *cwd*.

    On failure the command line and working directory are printed and
    log.error is called to report the failure.

    :param args: command as a string (or list, joined for display only)
    :param cwd: working directory for the child process
    """
    ret = subprocess.call(args, cwd=cwd, shell=True)

    # FIX: a process killed by a signal returns a NEGATIVE code on POSIX,
    # so the old "ret > 0" check silently ignored those failures —
    # any non-zero return code is a failure.
    if ret != 0:
        if not isinstance(args, str):
            args = " ".join(args)

        log.normal("{2}COMMAND:{3} {0}\n"
                   "{4}WORKING DIR:{5} {1}".format(args, cwd, log.YELLOW,
                                                   log.ENDC, log.YELLOW, log.ENDC))
        log.error("Command execution has failed")
def s3_prefix_delete(s3, bucket, key, aws_secret_access_key, aws_access_key_id):
    """Delete the object *key* from the S3 *bucket*.

    :param s3: S3 client — NOTE(review): not used; a fresh boto3 resource is
        created below with the given credentials instead. Confirm intentional.
    :param bucket: bucket name
    :param key: full object key to delete
    :param aws_secret_access_key: AWS secret key used for the new resource
    :param aws_access_key_id: AWS key id used for the new resource
    :return: True — returned even when the delete failed; presumably
        log.error aborts on fatal errors (TODO confirm)
    """
    import boto3
    from botocore.exceptions import ClientError  # NOTE(review): imported but unused here

    try:
        s3_resource = boto3.resource(
            "s3",
            aws_secret_access_key=aws_secret_access_key,
            aws_access_key_id=aws_access_key_id,
        )
        s3_resource.Object(bucket, key).delete()
    except Exception as e:
        log.error("Failed to delete key {0} from AWS S3: {1}".format(key, e))

    return True
def run(params):
    """Copy built binaries into the dist folder for every arch/build type.

    :param params: dict with at least "proj_path" and "target_name"
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    log.info("Packaging...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info(
                    "Copying for: {0}/{1}...".format(arch["conan_arch"], build_type)
                )

                # destination: dist/<target>/<build type>/<arch>, recreated empty
                dist_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                )

                file.remove_dir(dist_dir)
                file.create_dir(dist_dir)

                # source: the "bin" output of the target build for this arch
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "bin",
                )

                # copy files
                file.copy_all_inside(build_dir, dist_dir)

                log.ok()
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(target_name))
def get_all_targets(proj_path):
    """Return the sorted list of target names found in the project targets folder."""
    targets_path = os.path.join(
        proj_path, const.DIR_NAME_FILES, const.DIR_NAME_FILES_TARGETS
    )

    if not os.path.isdir(targets_path):
        log.error("Target folder not exists: {0}".format(targets_path))

    results = []

    # every sub-folder inside the targets folder is a target name
    for target_path in file.find_dirs_simple(targets_path, "*") or []:
        name = os.path.basename(target_path)

        if name:
            results.append(name)

    # sorting an empty list is a no-op, so no guard is needed
    results.sort()
    return results
def setup(params): proj_path = params["proj_path"] # version version = util.get_arg_value("--version", params["args"]) if not version or len(version) == 0: version = const.GLUECODE_TOOL_VERSION log.info("Glue code tool version: {0}".format(version)) # check tool folder tool_dir = os.path.join(proj_path, const.DIR_NAME_BUILD, const.DIR_NAME_GLUECODE) file.remove_dir(tool_dir) file.create_dir(tool_dir) # prepare tool data tool_file_path = gluecode.get_tool_path(params) if util.is_windows_platform(): file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni.bat".format( version) else: file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni".format( version) # prepare tool data try: net.download(file_url, tool_file_path) # add executable permission st = os.stat(tool_file_path) os.chmod(tool_file_path, st.st_mode | stat.S_IEXEC) except Exception as e: log.error("Error when download file {0}: {1}".format(file_url, e)) log.ok()
def generate(proj_path, target_name, version, source_files):
    """Pack *source_files* into the distribution archive for *target_name*."""
    # a version is mandatory
    if not version or len(version) == 0:
        log.error("You need define version name (parameter: --version)")

    log.info("Version defined: {0}".format(version))

    # recreate the build output folder from scratch
    output_dir = os.path.join(
        proj_path, const.DIR_NAME_BUILD, target_name, const.DIR_NAME_DIST
    )

    log.info("Removing old files...")
    file.remove_dir(output_dir)
    file.create_dir(output_dir)

    # pack everything into a single archive
    log.info("Packing {0} files...".format(len(source_files)))
    archive_path = os.path.join(output_dir, const.FILE_NAME_DIST_PACKED)
    tar_files(archive_path, source_files)

    log.ok("")
def run(params):
    """Run "conan build" for every arch/build type of the target, then copy assets.

    With "--dry-run" an existing build folder is reused instead of recreated.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    param_dry_run = util.list_has_key(params["args"], "--dry-run")

    if param_dry_run:
        log.info("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info(
                    "Building for: {0}/{1}...".format(arch["conan_arch"], build_type)
                )

                # conan build
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # dry run keeps an existing build dir; otherwise recreate it
                clean_build_dir = True

                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    file.remove_dir(build_dir)
                    file.create_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    # conanfile.py of the target recipe
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                    ),
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                runner.run(run_args, build_dir)

                # copy assets next to the built binaries (only when configured)
                if "assets_dir" in target_config:
                    assets_dir = target_config["assets_dir"]
                    assets_dir = os.path.join(proj_path, assets_dir)

                    if os.path.isdir(assets_dir):
                        build_assets_dir = os.path.join(
                            build_dir, "bin", os.path.basename(assets_dir)
                        )

                        file.remove_dir(build_assets_dir)
                        file.copy_dir(assets_dir, build_assets_dir, symlinks=True)

                log.ok()
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Run "conan install" for every arch/build type of the target.

    Uses the two-profile layout (-pr:b build profile, -pr:h host profile)
    and passes arch/api_level/build_type as host settings plus the
    project-specific "ezored_*" options.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # conan install output folder, recreated per arch/build type
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_CONAN,
                )

                file.remove_dir(build_dir)
                file.create_dir(build_dir)

                run_args = [
                    "conan",
                    "install",
                    # conanfile.py of the target recipe
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "-pr:b",
                    target.get_build_profile(),
                    "-pr:h",
                    arch["conan_profile"],
                    "-s:h",
                    "arch={0}".format(arch["conan_arch"]),
                    "-s:h",
                    "os.api_level={0}".format(arch["api_level"]),
                    "-s:h",
                    "build_type={0}".format(build_type),
                    "-o",
                    "ezored_arch={0}".format(arch["conan_arch"]),
                    "-o",
                    "ezored_name={0}".format(target_config["project_name"]),
                    "-o",
                    "ezored_version={0}".format(target_config["version"]),
                    "--build=missing",
                    "--update",
                ]

                runner.run(run_args, build_dir)

                log.ok()
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Build the target with conan for every arch/build type, then post-process
    the produced framework: patch its Info.plist, install public headers,
    copy the modulemap and generate the umbrella header.

    With "--dry-run" an existing build folder is reused instead of recreated.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    install_headers = target_config["install_headers"]
    param_dry_run = util.list_has_key(params["args"], "--dry-run")

    if param_dry_run:
        log.info("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # conan build
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # dry run keeps an existing build dir; otherwise recreate it
                clean_build_dir = True

                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    file.remove_dir(build_dir)
                    file.create_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    # conanfile.py of the target recipe
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                    ),
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                runner.run(run_args, build_dir)

                # find the correct Info.plist: flat framework layout first,
                # then the versioned (Versions/Current/Resources) layout
                plist_path1 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Info.plist",
                )

                plist_path2 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Versions",
                    "Current",
                    "Resources",
                    "Info.plist",
                )

                # the versioned layout wins when both exist
                plist_path = ""

                if os.path.exists(plist_path1):
                    plist_path = plist_path1

                if os.path.exists(plist_path2):
                    plist_path = plist_path2

                # add minimum version inside plist
                runner.run(
                    [
                        "plutil",
                        "-replace",
                        "MinimumOSVersion",
                        "-string",
                        arch["min_version"],
                        plist_path,
                    ],
                    proj_path,
                )

                # add supported platform inside plist
                runner.run(
                    [
                        "plutil",
                        "-replace",
                        "CFBundleSupportedPlatforms",
                        "-json",
                        '[ "{0}" ]'.format(arch["supported_platform"]),
                        plist_path,
                    ],
                    proj_path,
                )

                # headers: copy the configured public headers into the framework
                dist_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                file.create_dir(dist_headers_dir)

                if install_headers:
                    for header in install_headers:
                        source_header_dir = os.path.join(
                            proj_path, header["path"])

                        # only the "dir" type is supported here
                        if header["type"] == "dir":
                            file.copy_dir(
                                source_header_dir,
                                dist_headers_dir,
                                ignore_file=_header_ignore_list,
                                symlinks=True,
                            )
                        else:
                            log.error(
                                "Invalid type for install header list for {0}".
                                format(target_name))

                # modules: install the clang modulemap for the framework
                support_modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    "support",
                    "modules",
                )

                modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Modules",
                )

                file.remove_dir(modules_dir)
                file.create_dir(modules_dir)

                file.copy_file(
                    os.path.join(support_modules_dir, "module.modulemap"),
                    os.path.join(modules_dir, "module.modulemap"),
                )

                # umbrella header: start from the template and append one
                # #import per header found inside the framework Headers dir
                build_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                header_files = file.find_files(build_headers_dir, "*.h")

                content = file.read_file(
                    os.path.join(support_modules_dir, "umbrella-header.h"))

                for header_file in header_files:
                    # make the path relative to the Headers dir for #import
                    header_file = header_file.replace(build_headers_dir + "/", "")
                    content = content + '#import "{0}"\n'.format(header_file)

                if len(content) > 0:
                    umbrella_file = os.path.join(
                        build_headers_dir, target_config["umbrella_header"])

                    file.copy_file(
                        os.path.join(support_modules_dir, "umbrella-header.h"),
                        umbrella_file,
                    )

                    file.write_to_file(umbrella_file, content)
                else:
                    log.error("{0}".format(
                        "File not generated because framework headers is empty"
                    ))

                log.ok()
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
def upload( proj_path, version, force, dist_file_path, dist_file_name, dist_folder, aws_key_id, aws_secret_key, aws_bucket_name, aws_bucket_path, ): import boto3 # version if not version or len(version) == 0: log.error("You need define version name (parameter: --version)") log.info("Version defined: {0}".format(version)) # prepare to upload if not os.path.isfile(dist_file_path): log.error("Distribution file not exists: {0}".format(dist_file_path)) # prepare aws sdk log.info("Initializing AWS bucket and SDK...") if not aws_key_id or not aws_secret_key: log.fail("Your AWS credentials are invalid") s3_client = boto3.client( service_name="s3", aws_secret_access_key=aws_secret_key, aws_access_key_id=aws_key_id, ) # checking for existing version log.info("Checking if version exists...") object_name = "{0}/{1}/{2}".format( aws_bucket_path, version, dist_file_name, ) has_version = s3_key_exists(s3_client, aws_bucket_name, object_name) if has_version: if force: log.info( "The version {0} already exists, removing...".format(version)) s3_prefix_delete( s3_client, aws_bucket_name, object_name, aws_secret_key, aws_key_id, ) else: log.error("The version {0} already exists".format(version)) # upload log.info('Uploading file "{0}" to S3 bucket "{1}"...'.format( dist_file_path, aws_bucket_name)) s3_client.upload_file( dist_file_path, aws_bucket_name, object_name, ExtraArgs={"ACL": "public-read"}, Callback=ProgressPercentage(dist_file_path), ) log.normal("") log.ok("")
def generate_xcframework(proj_path, target_name, target_config, archs, build_types):
    """Assemble one xcframework per build type from the per-arch frameworks.

    For each build type: archs are grouped by their "group" key, each group is
    merged into a single fat framework with lipo, then all group frameworks
    are combined with "xcodebuild -create-xcframework" into the dist folder.
    """
    log.info("Packaging xcframework...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info("Generating for: {0}...".format(build_type))

                # generate group list and the matching xcodebuild arguments
                groups = []
                groups_command = []

                for arch in archs:
                    if not arch["group"] in groups:
                        groups.append(arch["group"])
                        groups_command.append("-framework")
                        groups_command.append(
                            os.path.join(
                                proj_path,
                                const.DIR_NAME_BUILD,
                                target_name,
                                build_type,
                                arch["group"],
                                "xcframework",
                                "{0}.framework".format(
                                    target_config["project_name"]),
                            ))

                if len(groups) == 0:
                    log.error(
                        "Group list are empty, make sure you have defined group name for each arch in config file for this target"
                    )

                # generate framework for each group
                for group in groups:
                    # pick a framework to use as base for the current group
                    # NOTE(review): comment in the original said "first", but
                    # the loop keeps the LAST matching arch — confirm intent
                    base_framework_arch = None

                    for arch in archs:
                        if arch["group"] == group:
                            base_framework_arch = arch

                    if not base_framework_arch:
                        log.error(
                            "Group framework was not found: {0}".format(group))

                    # copy base framework as skeleton for the fat framework
                    framework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        base_framework_arch["group"],
                        base_framework_arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                        "lib",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    group_xcframework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        group,
                        "xcframework",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    file.remove_dir(group_xcframework_dir)
                    file.copy_dir(framework_dir, group_xcframework_dir, symlinks=True)

                    # merge all arch binaries of the group into one fat binary
                    lipo_archs_args = []

                    for arch in archs:
                        if arch["group"] == group:
                            lipo_archs_args.append(
                                os.path.join(
                                    proj_path,
                                    const.DIR_NAME_BUILD,
                                    target_name,
                                    build_type,
                                    arch["group"],
                                    arch["conan_arch"],
                                    const.DIR_NAME_BUILD_TARGET,
                                    "lib",
                                    "{0}.framework".format(
                                        target_config["project_name"]),
                                    target_config["project_name"],
                                ))

                    lipo_args = [
                        "lipo",
                        "-create",
                        "-output",
                        os.path.join(group_xcframework_dir,
                                     target_config["project_name"]),
                    ]

                    lipo_args.extend(lipo_archs_args)

                    runner.run(lipo_args, proj_path)

                # generate xcframework from all group frameworks
                xcframework_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.xcframework".format(target_config["project_name"]),
                )

                file.remove_dir(xcframework_dir)

                xcodebuild_command = ["xcodebuild", "-create-xcframework"]
                xcodebuild_command += groups_command
                xcodebuild_command += ["-output", xcframework_dir]

                runner.run(xcodebuild_command, proj_path)

                # check file
                log.info("Checking file for: {0}...".format(build_type))
                runner.run(["ls", xcframework_dir], proj_path)
        else:
            log.info('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.info('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Build the distribution artifacts (framework and/or xcframework) for the
    target, then add the strip-framework script and the cocoapods podspec.

    "--no-framework" / "--no-xcframework" skip the respective artifact;
    disabling both is an error.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    no_framework = util.list_has_key(params["args"], "--no-framework")
    no_xcframework = util.list_has_key(params["args"], "--no-xcframework")

    # at least one need be generated
    if no_framework and no_xcframework:
        log.error(
            "You need let generate framework or xcframework, but both are disabled"
        )

    # remove dist folder for the target
    dist_dir = os.path.join(
        proj_path,
        const.DIR_NAME_DIST,
        target_name,
    )

    file.remove_dir(dist_dir)

    # generate framework
    if not no_framework:
        generate_framework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # generate xcframework
    if not no_xcframework:
        generate_xcframework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # add strip framework script (only required if final project use framework instead of xcframework)
    # NOTE(review): the paths below are NOT joined with proj_path — presumably
    # the tool runs with the project root as cwd; confirm against callers
    log.info("Adding strip framework script...")

    target_scripts_dir = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "scripts",
    )

    file.copy_dir(
        target_scripts_dir,
        os.path.join(
            const.DIR_NAME_DIST,
            target_name,
            "scripts",
        ),
    )

    # cocoapods: copy the podspec template and fill in name and version
    log.info("Adding cocoapods script...")

    pod_file_path = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "cocoapods",
        "{0}.podspec".format(target_config["project_name"]),
    )

    target_pod_file_path = os.path.join(
        const.DIR_NAME_DIST,
        target_name,
        "{0}.podspec".format(target_config["project_name"]),
    )

    file.copy_file(
        pod_file_path,
        target_pod_file_path,
    )

    file.replace_in_file(target_pod_file_path, "{NAME}",
                         target_config["project_name"])
    file.replace_in_file(target_pod_file_path, "{VERSION}",
                         target_config["version"])

    # finish
    log.ok()
def run(params):
    """Dispatch "<target> <verb>" from the CLI arguments.

    args[0] selects the target, args[1] the verb; the verb's "run" function is
    executed from the target's verbs folder. Missing or unknown target/verb
    falls back to listing the available options.
    """
    args = params["args"]
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)
    show_target_list = False

    if len(args) > 0:
        # first positional argument is the target name (consumed from args)
        target_item = args[0]
        args.pop(0)

        if target_item in targets:
            # verbs for the chosen target, minus the internal-only ones
            target_verbs = target.get_all_target_verbs(proj_path, target_item)
            target_verbs = list(
                util.filter_list(target_verbs, const.TARGET_VERBS_INTERNAL))

            show_target_verb_list = False

            if len(args) > 0:
                # next positional argument is the verb name
                verb_name = args[0]

                if verb_name in target_verbs:
                    log.info('Running "{0}" on target "{1}"...'.format(
                        verb_name, target_item))

                    target_verb_folder = os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_item,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    )

                    params["target_name"] = target_item

                    # execute the verb module's "run" entry point externally
                    runner.run_external(
                        path=target_verb_folder,
                        module_name=verb_name,
                        command_name="run",
                        command_params=params,
                        show_log=False,
                        show_error_log=True,
                        throw_error=True,
                    )
                else:
                    show_target_verb_list = True
            else:
                show_target_verb_list = True

            if show_target_verb_list:
                if target_verbs and len(target_verbs) > 0:
                    log.colored("List of available target verbs:\n", log.PURPLE)

                    for target_verb in target_verbs:
                        log.normal(" - {0}".format(target_verb))
                else:
                    log.error("No target verbs available")
        else:
            show_target_list = True
    else:
        show_target_list = True

    if show_target_list:
        show_help(params)
def code_format(params): proj_path = params["proj_path"] # format c++ files has_tool = check_cpp_formatter() if has_tool: dir_list = [ { "path": os.path.join(proj_path, const.DIR_NAME_FILES, const.DIR_NAME_FILES_SRC), "patterns": ["*.cpp", "*.hpp"], }, { "path": os.path.join(proj_path, const.DIR_NAME_PROJECTS), "patterns": ["*.cpp", "*.hpp"], }, ] if dir_list: log.info("Formating C++ files...") for dir_item in dir_list: patterns = dir_item["patterns"] for pattern_item in patterns: files = file.find_files(dir_item["path"], pattern_item) for file_item in files: log.info('Formatting file "{0}"...'.format( os.path.relpath(file_item))) run_args = [ "clang-format", "-style", "file", "-i", file_item ] runner.run(run_args, proj_path) log.ok() else: log.error("No C++ files found to format") # format python files has_tool = check_php_formatter() if has_tool: dir_list = [ { "path": proj_path, "patterns": ["make.py"] }, { "path": os.path.join(proj_path, const.DIR_NAME_FILES), "patterns": ["*.py"], }, ] if dir_list: log.info("Formating Python files...") for dir_item in dir_list: patterns = dir_item["patterns"] for pattern_item in patterns: files = file.find_files(dir_item["path"], pattern_item) for file_item in files: log.info('Formatting file "{0}"...'.format( os.path.relpath(file_item))) run_args = ["black", "-q", file_item] runner.run(run_args, proj_path) log.ok() else: log.error("No Python files found to format")
def run(params):
    """Run "conan install" for every arch/build type of the target (darwin).

    Passes arch/build_type/os.version as settings plus the project-specific
    "ezored_*" and "darwin-toolchain:*" options.
    """
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # conan install output folder, recreated per arch/build type
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_CONAN,
                )

                file.remove_dir(build_dir)
                file.create_dir(build_dir)

                run_args = [
                    "conan",
                    "install",
                    # conanfile.py of the target recipe
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--profile",
                    arch["conan_profile"],
                    "-s",
                    "arch={0}".format(arch["conan_arch"]),
                    "-s",
                    "build_type={0}".format(build_type),
                    "-s",
                    "os.version={0}".format(arch["min_version"]),
                    "-o",
                    "ezored_arch={0}".format(arch["conan_arch"]),
                    "-o",
                    "ezored_name={0}".format(target_config["project_name"]),
                    "-o",
                    "ezored_version={0}".format(target_config["version"]),
                    "-o",
                    "ezored_group={0}".format(arch["group"]),
                    # NOTE(review): when these keys are missing from the arch,
                    # Python's None is formatted into the option string
                    # ("...=None") — confirm the recipe expects that value
                    "-o",
                    "darwin-toolchain:enable_bitcode={0}".format(
                        (arch["enable_bitcode"]
                         if "enable_bitcode" in arch else None)),
                    "-o",
                    "darwin-toolchain:enable_arc={0}".format(
                        (arch["enable_arc"] if "enable_arc" in arch else None)),
                    "-o",
                    "darwin-toolchain:enable_visibility={0}".format(
                        (arch["enable_visibility"]
                         if "enable_visibility" in arch else None)),
                    "--build=missing",
                    "--update",
                ]

                runner.run(run_args, build_dir)

                log.ok()
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
def run_external(
    path,
    module_name,
    command_name,
    command_params,
    show_log=False,
    show_error_log=False,
    throw_error=False,
):
    """
    Execute external command inside path and return the command result.

    The module is imported from *path*, its *command_name* attribute is called
    with ``params=command_params``, and afterwards sys.path, sys.modules and
    the current working directory are restored so repeated calls re-import
    the module fresh.

    :param path: path where python file is located
    :param module_name: module name
    :param command_name: command name
    :param command_params: command params
    :param show_log: show log
    :param show_error_log: show log if exception
    :param throw_error: throw error if exception
    :return: command result
    """
    result = None

    # snapshot state that the external command may mutate
    sys_path = list(sys.path)
    original_cwd = os.getcwd()

    target_module = None
    command = None

    try:
        # make the external folder importable, then resolve and call the command
        sys.path.insert(0, path)

        target_module = importlib.import_module(module_name)
        command = getattr(target_module, command_name)

        result = command(params=command_params)

        if show_log:
            log.normal(
                'Command "{0}" finished with success'.format(command_name))
    except Exception as e:
        if show_error_log:
            # fatal only when the caller does not want the exception re-raised
            log.error(
                'Error while call "{0}" on module "{1}": {2}'.format(
                    command_name, module_name, e),
                fatal=(not throw_error),
            )

        if throw_error:
            raise
    finally:
        # evict the module so the next call re-imports it from scratch
        if module_name in sys.modules:
            del sys.modules[module_name]

        if target_module is not None:
            del target_module

        if command is not None:
            del command

        # restore import path and working directory
        sys.path = sys_path
        os.chdir(original_cwd)

    return result