def generate(params):
    """Generate glue code files for every module configured in the project."""
    proj_path = params["proj_path"]

    # the glue code folder must exist before anything can be generated
    gluecode_modules_path = os.path.join(
        proj_path, const.DIR_NAME_FILES, const.DIR_NAME_GLUECODE
    )

    if not os.path.isdir(gluecode_modules_path):
        log.error("Glue code modules folder not exists: {0}".format(
            gluecode_modules_path))

    # resolve the module list from the project configuration
    gluecode_config = config.run(proj_path, None, params)
    module_list = gluecode_config["modules"]

    if not module_list:
        log.error("No glue code modules to generate")
    else:
        log.info("Generating files for all modules...")

        for module_name in module_list:
            log.info('Generating glue code files for "{0}"...'.format(module_name))

            # each module exposes files.gluecode.<module>.generate.run(params)
            import_path, entry_name = "files.gluecode.{0}.generate.run".format(
                module_name
            ).rsplit(".", 1)
            entry_point = getattr(importlib.import_module(import_path), entry_name)
            entry_point(params)

        log.ok()
def download(proj_path, version, dist_file_path, dist_file_name, dist_folder,
             aws_s3_url):
    """Download a packed distribution file from S3 and unpack it into dist."""
    # a version is mandatory to build the remote URL
    if not version or len(version) == 0:
        log.error("You need define version name (parameter: --version)")

    log.info("Version defined: {0}".format(version))

    # drop any previously downloaded archive
    log.info("Removing old file...")
    file.remove_file(dist_file_path)

    # fetch the archive from <s3-url>/<version>/<file-name>
    log.info("Downloading {0} file...".format(dist_file_name))
    file_url = "{0}/{1}/{2}".format(aws_s3_url, version, dist_file_name)

    try:
        net.download(file_url, dist_file_path)
    except Exception as e:
        log.error("Error when download file {0}: {1}".format(file_url, e))

    # recreate the dist folder and unpack the fresh archive into it
    log.info("Removing old folder...")
    unpack_dir = os.path.join(proj_path, const.DIR_NAME_DIST, dist_folder)
    file.create_dir(os.path.join(proj_path, const.DIR_NAME_DIST))
    file.remove_dir(unpack_dir)

    log.info("Unpacking downloaded file...")
    pack.unpack(dist_file_path, unpack_dir)

    log.ok("")
def check_php_formatter():
    """Check that the "black" code formatter can be invoked.

    Returns:
        bool: True when ``black --version`` runs successfully, False (after
        logging an installation hint) otherwise.

    NOTE(review): despite the name, this checks "black", which formats
    Python, not PHP — the name is kept so existing callers keep working.
    """
    try:
        subprocess.check_output(["black", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        # OSError: binary missing; CalledProcessError: binary present but
        # exited non-zero — either way the tool is unusable.
        log.info("Black is not installed, check: https://github.com/psf/black")
        return False
def check_cpp_formatter():
    """Check that the "clang-format" binary can be invoked.

    Returns:
        bool: True when ``clang-format --version`` runs successfully, False
        (after logging an installation hint) otherwise.
    """
    try:
        subprocess.check_output(["clang-format", "--version"])
        return True
    except (OSError, subprocess.CalledProcessError):
        # OSError: binary missing; CalledProcessError: binary present but
        # exited non-zero — either way the tool is unusable.
        log.info(
            "Clang-format is not installed, check: https://clang.llvm.org/docs/ClangFormat.html"
        )
        return False
def run(params):
    """Remove build output and temporary files from the project tree."""
    log.info("Cleaning...")

    proj_path = params["proj_path"]

    # wipe the whole build folder first
    file.remove_dir(os.path.join(proj_path, "build"))

    # then purge leftover artifacts and OS metadata files everywhere
    for pattern in ("*.pyc", "*.zip", "*.tar", "*.tar.gz", "Thumbs.db",
                    ".DS_Store"):
        file.purge_files(proj_path, pattern)

    file.purge_dirs(proj_path, "__pycache__")

    log.ok("")
def run(params):
    """Copy built binaries into the dist folder for every arch/build type."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    log.info("Packaging...")

    if not archs or len(archs) == 0:
        log.error('Arch list for "{0}" is invalid or empty'.format(target_name))
    else:
        for arch in archs:
            for build_type in build_types:
                log.info(
                    "Copying for: {0}/{1}...".format(arch["conan_arch"], build_type)
                )

                # recreate the destination folder for this combination
                dist_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                )
                file.remove_dir(dist_dir)
                file.create_dir(dist_dir)

                # binaries live in <build>/<target>/<type>/<arch>/<target-dir>/bin
                build_bin_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "bin",
                )
                file.copy_all_inside(build_bin_dir, dist_dir)

        log.ok()
def setup(params):
    """Download the djinni glue code generator tool into the build folder."""
    proj_path = params["proj_path"]

    # tool version: explicit --version argument or the project default
    version = util.get_arg_value("--version", params["args"])

    if not version or len(version) == 0:
        version = const.GLUECODE_TOOL_VERSION

    log.info("Glue code tool version: {0}".format(version))

    # recreate the tool folder from scratch
    tool_dir = os.path.join(proj_path, const.DIR_NAME_BUILD, const.DIR_NAME_GLUECODE)
    file.remove_dir(tool_dir)
    file.create_dir(tool_dir)

    # pick the release asset that matches the current platform
    tool_file_path = gluecode.get_tool_path(params)

    if util.is_windows_platform():
        file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni.bat".format(
            version)
    else:
        file_url = "https://github.com/cross-language-cpp/djinni-generator/releases/download/v{0}/djinni".format(
            version)

    # download the tool and mark it executable
    try:
        net.download(file_url, tool_file_path)

        st = os.stat(tool_file_path)
        os.chmod(tool_file_path, st.st_mode | stat.S_IEXEC)
    except Exception as e:
        log.error("Error when download file {0}: {1}".format(file_url, e))

    log.ok()
def download(url, dst_file):
    # Download *url* to *dst_file* in fixed-size chunks, reporting progress
    # through download_hook after each block. A browser-like User-Agent is
    # sent because some hosts reject the default urllib agent.
    # NOTE(review): uses the "urllib2" name — presumably aliased to
    # urllib.request elsewhere in this module for py2/py3 compat; confirm.
    req = urllib2.Request(url, headers={"User-Agent": "Mozilla/5.0"})
    u = urllib2.urlopen(req)
    with open(dst_file, "wb") as f:
        meta = u.info()
        # py2 headers expose getheaders(); py3 email.Message uses get_all()
        meta_func = meta.getheaders if hasattr(meta, "getheaders") else meta.get_all
        meta_length = meta_func("Content-Length")
        file_size = None
        if meta_length:
            file_size = int(meta_length[0])
        if file_size:
            log.info("Download file size: {0}".format(
                util.readable_file_size(file_size)))
        file_size_dl = 0
        block_sz = 8192
        block_count = 0
        # read until EOF; file_size may be None when the server sends no
        # Content-Length — download_hook must tolerate that
        while True:
            dbuffer = u.read(block_sz)
            if not dbuffer:
                break
            dbuffer_len = len(dbuffer)
            file_size_dl += dbuffer_len
            block_count += 1
            f.write(dbuffer)
            download_hook(block_count, block_sz, file_size)
            # flush so the progress output appears immediately
            sys.stdout.flush()
    # terminate the progress line
    log.normal("")
def generate(proj_path, target_name, version, source_files):
    """Pack the given source files into the target's distribution archive."""
    # a version is mandatory for distribution packages
    if not version or len(version) == 0:
        log.error("You need define version name (parameter: --version)")

    log.info("Version defined: {0}".format(version))

    # recreate the dist build folder for this target
    build_dir = os.path.join(
        proj_path, const.DIR_NAME_BUILD, target_name, const.DIR_NAME_DIST
    )

    log.info("Removing old files...")
    file.remove_dir(build_dir)
    file.create_dir(build_dir)

    # archive everything into a single packed file
    log.info("Packing {0} files...".format(len(source_files)))
    tar_files(os.path.join(build_dir, const.FILE_NAME_DIST_PACKED), source_files)

    log.ok("")
def run(params):
    """Run "conan build" for every arch/build type and copy optional assets."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    # in dry mode an existing build folder is reused instead of recreated
    param_dry_run = util.list_has_key(params["args"], "--dry-run")

    if param_dry_run:
        log.info("Running in dry mode...")

    if not archs or len(archs) == 0:
        log.error('Arch list for "{0}" is invalid or empty'.format(target_name))
    else:
        # folders shared by every combination
        target_files_dir = os.path.join(
            proj_path,
            const.DIR_NAME_FILES,
            const.DIR_NAME_FILES_TARGETS,
            target_name,
        )

        for arch in archs:
            for build_type in build_types:
                log.info(
                    "Building for: {0}/{1}...".format(arch["conan_arch"], build_type)
                )

                arch_root_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                )
                build_dir = os.path.join(arch_root_dir, const.DIR_NAME_BUILD_TARGET)

                # keep an existing build folder when running in dry mode
                if not (param_dry_run and os.path.isdir(build_dir)):
                    file.remove_dir(build_dir)
                    file.create_dir(build_dir)

                # conan build: recipe + cmake sources + build/install folders
                run_args = [
                    "conan",
                    "build",
                    os.path.join(
                        target_files_dir,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(target_files_dir, const.DIR_NAME_FILES_TARGET_CMAKE),
                    "--build-folder",
                    os.path.join(arch_root_dir, const.DIR_NAME_BUILD_TARGET),
                    "--install-folder",
                    os.path.join(arch_root_dir, const.DIR_NAME_BUILD_CONAN),
                ]
                runner.run(run_args, build_dir)

                # copy optional assets next to the produced binaries
                if "assets_dir" in target_config:
                    assets_dir = os.path.join(proj_path, target_config["assets_dir"])

                    if os.path.isdir(assets_dir):
                        build_assets_dir = os.path.join(
                            build_dir, "bin", os.path.basename(assets_dir)
                        )
                        file.remove_dir(build_assets_dir)
                        file.copy_dir(assets_dir, build_assets_dir, symlinks=True)

        log.ok()
def run(params):
    """Run "conan install" for every arch/build type of the target."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    if not archs or len(archs) == 0:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
    else:
        # conan recipe shared by every combination
        recipe_path = os.path.join(
            proj_path,
            const.DIR_NAME_FILES,
            const.DIR_NAME_FILES_TARGETS,
            target_name,
            const.DIR_NAME_FILES_TARGET_CONAN,
            const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
            const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
        )

        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # recreate the conan install folder for this combination
                install_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_CONAN,
                )
                file.remove_dir(install_dir)
                file.create_dir(install_dir)

                # build profile (-pr:b) + host profile/settings (-pr:h/-s:h)
                runner.run(
                    [
                        "conan",
                        "install",
                        recipe_path,
                        "-pr:b",
                        target.get_build_profile(),
                        "-pr:h",
                        arch["conan_profile"],
                        "-s:h",
                        "arch={0}".format(arch["conan_arch"]),
                        "-s:h",
                        "os.api_level={0}".format(arch["api_level"]),
                        "-s:h",
                        "build_type={0}".format(build_type),
                        "-o",
                        "ezored_arch={0}".format(arch["conan_arch"]),
                        "-o",
                        "ezored_name={0}".format(target_config["project_name"]),
                        "-o",
                        "ezored_version={0}".format(target_config["version"]),
                        "--build=missing",
                        "--update",
                    ],
                    install_dir,
                )

        log.ok()
def run(params):
    # Build the target with conan for every arch/build type and turn the
    # produced library into a complete .framework: fix the Info.plist,
    # install public headers, add the clang module map and generate the
    # umbrella header.
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    install_headers = target_config["install_headers"]
    param_dry_run = util.list_has_key(params["args"], "--dry-run")

    if param_dry_run:
        log.info("Running in dry mode...")

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # conan build
                build_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                )

                # in dry mode an existing build folder is reused as-is
                clean_build_dir = True

                if param_dry_run and os.path.isdir(build_dir):
                    clean_build_dir = False

                if clean_build_dir:
                    file.remove_dir(build_dir)
                    file.create_dir(build_dir)

                run_args = [
                    "conan",
                    "build",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    "--source-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    "--build-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                    ),
                    "--install-folder",
                    os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch["group"],
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]
                runner.run(run_args, build_dir)

                # find correct info plist file
                # (flat framework layout vs versioned
                # "Versions/Current/Resources" layout — whichever exists)
                plist_path1 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Info.plist",
                )

                plist_path2 = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Versions",
                    "Current",
                    "Resources",
                    "Info.plist",
                )

                # NOTE(review): if neither path exists, plist_path stays ""
                # and the plutil calls below receive an empty path
                plist_path = ""

                if os.path.exists(plist_path1):
                    plist_path = plist_path1

                if os.path.exists(plist_path2):
                    plist_path = plist_path2

                # add minimum version inside plist
                runner.run(
                    [
                        "plutil",
                        "-replace",
                        "MinimumOSVersion",
                        "-string",
                        arch["min_version"],
                        plist_path,
                    ],
                    proj_path,
                )

                # add supported platform inside plist
                runner.run(
                    [
                        "plutil",
                        "-replace",
                        "CFBundleSupportedPlatforms",
                        "-json",
                        '[ "{0}" ]'.format(arch["supported_platform"]),
                        plist_path,
                    ],
                    proj_path,
                )

                # headers: copy the configured public headers into the
                # framework's Headers folder (only "dir" entries supported)
                dist_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                file.create_dir(dist_headers_dir)

                if install_headers:
                    for header in install_headers:
                        source_header_dir = os.path.join(
                            proj_path, header["path"])

                        if header["type"] == "dir":
                            file.copy_dir(
                                source_header_dir,
                                dist_headers_dir,
                                ignore_file=_header_ignore_list,
                                symlinks=True,
                            )
                        else:
                            log.error(
                                "Invalid type for install header list for {0}".
                                format(target_name))

                # modules: install the clang module map from the target's
                # support files
                support_modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    "support",
                    "modules",
                )

                modules_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Modules",
                )

                file.remove_dir(modules_dir)
                file.create_dir(modules_dir)

                file.copy_file(
                    os.path.join(support_modules_dir, "module.modulemap"),
                    os.path.join(modules_dir, "module.modulemap"),
                )

                # umbrella header: template + one #import per installed header
                build_headers_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                    "Headers",
                )

                header_files = file.find_files(build_headers_dir, "*.h")

                content = file.read_file(
                    os.path.join(support_modules_dir, "umbrella-header.h"))

                for header_file in header_files:
                    # make the path relative to the Headers folder
                    header_file = header_file.replace(build_headers_dir + "/", "")
                    content = content + '#import "{0}"\n'.format(header_file)

                if len(content) > 0:
                    umbrella_file = os.path.join(
                        build_headers_dir, target_config["umbrella_header"])

                    file.copy_file(
                        os.path.join(support_modules_dir, "umbrella-header.h"),
                        umbrella_file,
                    )

                    file.write_to_file(umbrella_file, content)
                else:
                    log.error("{0}".format(
                        "File not generated because framework headers is empty"
                    ))

        log.ok()
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
def generate_xcframework(proj_path, target_name, target_config, archs, build_types):
    # Merge the per-arch frameworks into one fat framework per arch group
    # (via lipo), then combine the groups into a single .xcframework per
    # build type with "xcodebuild -create-xcframework".
    log.info("Packaging xcframework...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info("Generating for: {0}...".format(build_type))

                # generate group list and the matching -framework arguments
                # for the final xcodebuild call
                groups = []
                groups_command = []

                for arch in archs:
                    if not arch["group"] in groups:
                        groups.append(arch["group"])
                        groups_command.append("-framework")
                        groups_command.append(
                            os.path.join(
                                proj_path,
                                const.DIR_NAME_BUILD,
                                target_name,
                                build_type,
                                arch["group"],
                                "xcframework",
                                "{0}.framework".format(
                                    target_config["project_name"]),
                            ))

                if len(groups) == 0:
                    log.error(
                        "Group list are empty, make sure you have defined group name for each arch in config file for this target"
                    )

                # generate framework for each group
                for group in groups:
                    # get first framework data for current group
                    # NOTE(review): the loop has no break, so the LAST
                    # matching arch wins despite the "first" comment
                    base_framework_arch = None

                    for arch in archs:
                        if arch["group"] == group:
                            base_framework_arch = arch

                    if not base_framework_arch:
                        log.error(
                            "Group framework was not found: {0}".format(group))

                    # copy base framework as a template for the fat one
                    framework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        base_framework_arch["group"],
                        base_framework_arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                        "lib",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    group_xcframework_dir = os.path.join(
                        proj_path,
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        group,
                        "xcframework",
                        "{0}.framework".format(target_config["project_name"]),
                    )

                    file.remove_dir(group_xcframework_dir)

                    file.copy_dir(framework_dir,
                                  group_xcframework_dir,
                                  symlinks=True)

                    # generate single (fat) framework binary for the group
                    lipo_archs_args = []

                    for arch in archs:
                        if arch["group"] == group:
                            lipo_archs_args.append(
                                os.path.join(
                                    proj_path,
                                    const.DIR_NAME_BUILD,
                                    target_name,
                                    build_type,
                                    arch["group"],
                                    arch["conan_arch"],
                                    const.DIR_NAME_BUILD_TARGET,
                                    "lib",
                                    "{0}.framework".format(
                                        target_config["project_name"]),
                                    target_config["project_name"],
                                ))

                    lipo_args = [
                        "lipo",
                        "-create",
                        "-output",
                        os.path.join(group_xcframework_dir,
                                     target_config["project_name"]),
                    ]

                    lipo_args.extend(lipo_archs_args)

                    runner.run(lipo_args, proj_path)

                # generate xcframework from all group frameworks
                xcframework_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.xcframework".format(target_config["project_name"]),
                )

                file.remove_dir(xcframework_dir)

                xcodebuild_command = ["xcodebuild", "-create-xcframework"]
                xcodebuild_command += groups_command
                xcodebuild_command += ["-output", xcframework_dir]

                runner.run(xcodebuild_command, proj_path)

                # check file
                log.info("Checking file for: {0}...".format(build_type))
                runner.run(["ls", xcframework_dir], proj_path)
        else:
            log.info('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.info('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    """Package the target as framework and/or xcframework plus support files."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    no_framework = util.list_has_key(params["args"], "--no-framework")
    no_xcframework = util.list_has_key(params["args"], "--no-xcframework")

    # at least one package format has to be produced
    if no_framework and no_xcframework:
        log.error(
            "You need let generate framework or xcframework, but both are disabled"
        )

    # start from an empty dist folder for this target
    file.remove_dir(os.path.join(proj_path, const.DIR_NAME_DIST, target_name))

    if not no_framework:
        generate_framework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    if not no_xcframework:
        generate_xcframework(
            proj_path=proj_path,
            target_name=target_name,
            target_config=target_config,
            archs=archs,
            build_types=build_types,
        )

    # strip script is only needed when the consumer links the plain framework
    log.info("Adding strip framework script...")

    file.copy_dir(
        os.path.join(
            const.DIR_NAME_FILES,
            const.DIR_NAME_FILES_TARGETS,
            target_name,
            const.DIR_NAME_SUPPORT,
            "scripts",
        ),
        os.path.join(
            const.DIR_NAME_DIST,
            target_name,
            "scripts",
        ),
    )

    # podspec template with name/version placeholders filled in
    log.info("Adding cocoapods script...")

    podspec_name = "{0}.podspec".format(target_config["project_name"])

    pod_file_path = os.path.join(
        const.DIR_NAME_FILES,
        const.DIR_NAME_FILES_TARGETS,
        target_name,
        const.DIR_NAME_SUPPORT,
        "cocoapods",
        podspec_name,
    )

    target_pod_file_path = os.path.join(
        const.DIR_NAME_DIST,
        target_name,
        podspec_name,
    )

    file.copy_file(pod_file_path, target_pod_file_path)
    file.replace_in_file(target_pod_file_path, "{NAME}",
                         target_config["project_name"])
    file.replace_in_file(target_pod_file_path, "{VERSION}",
                         target_config["version"])

    log.ok()
def generate_framework(proj_path, target_name, target_config, archs, build_types):
    # Build one fat .framework per build type: copy the first arch's
    # framework as a template, strip the plist platform restriction and
    # lipo the valid arch binaries into a single universal binary.
    log.info("Packaging framework...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info("Copying for: {0}...".format(build_type))

                # copy first folder for base (template for the fat framework)
                framework_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    archs[0]["group"],
                    archs[0]["conan_arch"],
                    const.DIR_NAME_BUILD_TARGET,
                    "lib",
                    "{0}.framework".format(target_config["project_name"]),
                )

                dist_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.framework".format(target_config["project_name"]),
                )

                file.remove_dir(dist_dir)

                file.copy_dir(framework_dir, dist_dir, symlinks=True)

                # update info plist file
                plist_path = os.path.join(
                    proj_path,
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    "{0}.framework".format(target_config["project_name"]),
                    "Info.plist",
                )

                if os.path.exists(plist_path):
                    # remove supported platforms inside plist — a universal
                    # framework must not restrict platforms
                    runner.run(
                        [
                            "plutil",
                            "-remove",
                            "CFBundleSupportedPlatforms",
                            plist_path,
                        ],
                        proj_path,
                    )

                # lipo
                # NOTE(review): only archs whose group passes is_valid_group
                # end up in the fat binary
                lipo_archs_args = []

                for arch in archs:
                    if is_valid_group(arch["group"]):
                        lipo_archs_args.append(
                            os.path.join(
                                proj_path,
                                const.DIR_NAME_BUILD,
                                target_name,
                                build_type,
                                arch["group"],
                                arch["conan_arch"],
                                const.DIR_NAME_BUILD_TARGET,
                                "lib",
                                "{0}.framework".format(
                                    target_config["project_name"]),
                                target_config["project_name"],
                            ))

                lipo_args = [
                    "lipo",
                    "-create",
                    "-output",
                    os.path.join(dist_dir, target_config["project_name"]),
                ]

                lipo_args.extend(lipo_archs_args)

                runner.run(lipo_args, proj_path)

                # check file
                log.info("Checking file for: {0}...".format(build_type))

                runner.run(
                    [
                        "file",
                        os.path.join(dist_dir, target_config["project_name"])
                    ],
                    proj_path,
                )
        else:
            log.info('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.info('Arch list for "{0}" is invalid or empty'.format(target_name))
def run(params):
    # Dispatch a target verb: "<target> <verb>" runs the verb module from
    # the target's verbs folder; anything else prints the available targets
    # or verbs.
    args = params["args"]
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)
    show_target_list = False

    if len(args) > 0:
        # first positional argument selects the target
        target_item = args[0]
        args.pop(0)

        if target_item in targets:
            target_verbs = target.get_all_target_verbs(proj_path, target_item)
            # hide internal verbs from the user-facing list
            target_verbs = list(
                util.filter_list(target_verbs, const.TARGET_VERBS_INTERNAL))

            show_target_verb_list = False

            if len(args) > 0:
                # second positional argument selects the verb
                verb_name = args[0]

                if verb_name in target_verbs:
                    log.info('Running "{0}" on target "{1}"...'.format(
                        verb_name, target_item))

                    target_verb_folder = os.path.join(
                        proj_path,
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_item,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    )

                    params["target_name"] = target_item

                    # execute the verb module's "run" entry point
                    runner.run_external(
                        path=target_verb_folder,
                        module_name=verb_name,
                        command_name="run",
                        command_params=params,
                        show_log=False,
                        show_error_log=True,
                        throw_error=True,
                    )
                else:
                    show_target_verb_list = True
            else:
                show_target_verb_list = True

            if show_target_verb_list:
                if target_verbs and len(target_verbs) > 0:
                    log.colored("List of available target verbs:\n", log.PURPLE)

                    for target_verb in target_verbs:
                        log.normal(" - {0}".format(target_verb))
                else:
                    log.error("No target verbs available")
        else:
            show_target_list = True
    else:
        show_target_list = True

    if show_target_list:
        show_help(params)
def setup(params):
    """Create the default conan profile, install target profiles and remotes."""
    proj_path = params["proj_path"]
    targets = target.get_all_targets(proj_path)

    log.info("Creating default profile...")

    # (re)create the auto-detected default conan profile
    runner.run(
        [
            "conan",
            "profile",
            "new",
            "default",
            "--detect",
            "--force",
        ],
        cwd=os.getcwd(),
    )

    # copy every target's conan profile files into the user conan folder
    log.info("Copying files...")

    if targets:
        for target_item in targets:
            profile_files = file.find_files(
                os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_item,
                    const.DIR_NAME_FILES_TARGET_CONAN,
                    const.DIR_NAME_FILES_TARGET_CONAN_PROFILE,
                ),
                "*profile",
            )

            if profile_files:
                conan_profile_dir = os.path.join(
                    file.home_dir(),
                    const.DIR_NAME_HOME_CONAN,
                    const.DIR_NAME_HOME_CONAN_PROFILES,
                )

                for item in profile_files:
                    filename = os.path.basename(item)
                    log.info('Copying profile "{0}"...'.format(filename))
                    file.copy_file(item, os.path.join(conan_profile_dir, filename))

    # register the darwin toolchain conan remote
    log.info("Adding darwin toolchain repository...")

    runner.run(
        [
            "conan",
            "remote",
            "add",
            "darwin-toolchain",
            "https://api.bintray.com/conan/ezored/conan-darwin-toolchain",
            "--force",
        ],
        cwd=os.getcwd(),
    )

    log.ok()
def upload(
    proj_path,
    version,
    force,
    dist_file_path,
    dist_file_name,
    dist_folder,
    aws_key_id,
    aws_secret_key,
    aws_bucket_name,
    aws_bucket_path,
):
    """Upload the packed distribution file to the configured S3 bucket."""
    import boto3

    # a version is mandatory to build the object key
    if not version or len(version) == 0:
        log.error("You need define version name (parameter: --version)")

    log.info("Version defined: {0}".format(version))

    # the distribution archive must exist before uploading
    if not os.path.isfile(dist_file_path):
        log.error("Distribution file not exists: {0}".format(dist_file_path))

    log.info("Initializing AWS bucket and SDK...")

    if not aws_key_id or not aws_secret_key:
        log.fail("Your AWS credentials are invalid")

    s3_client = boto3.client(
        service_name="s3",
        aws_secret_access_key=aws_secret_key,
        aws_access_key_id=aws_key_id,
    )

    # refuse to overwrite an existing version unless forced
    log.info("Checking if version exists...")

    object_name = "{0}/{1}/{2}".format(
        aws_bucket_path,
        version,
        dist_file_name,
    )

    if s3_key_exists(s3_client, aws_bucket_name, object_name):
        if not force:
            log.error("The version {0} already exists".format(version))
        else:
            log.info(
                "The version {0} already exists, removing...".format(version))

            s3_prefix_delete(
                s3_client,
                aws_bucket_name,
                object_name,
                aws_secret_key,
                aws_key_id,
            )

    # upload with public-read ACL, reporting progress
    log.info('Uploading file "{0}" to S3 bucket "{1}"...'.format(
        dist_file_path, aws_bucket_name))

    s3_client.upload_file(
        dist_file_path,
        aws_bucket_name,
        object_name,
        ExtraArgs={"ACL": "public-read"},
        Callback=ProgressPercentage(dist_file_path),
    )

    log.normal("")
    log.ok("")
def code_format(params):
    """Format C++ sources with clang-format and Python sources with black."""
    proj_path = params["proj_path"]

    # format c++ files
    if check_cpp_formatter():
        cpp_dir_list = [
            {
                "path": os.path.join(
                    proj_path, const.DIR_NAME_FILES, const.DIR_NAME_FILES_SRC
                ),
                "patterns": ["*.cpp", "*.hpp"],
            },
            {
                "path": os.path.join(proj_path, const.DIR_NAME_PROJECTS),
                "patterns": ["*.cpp", "*.hpp"],
            },
        ]

        if cpp_dir_list:
            log.info("Formating C++ files...")

            for dir_item in cpp_dir_list:
                for pattern_item in dir_item["patterns"]:
                    for file_item in file.find_files(dir_item["path"],
                                                     pattern_item):
                        log.info('Formatting file "{0}"...'.format(
                            os.path.relpath(file_item)))

                        runner.run(
                            ["clang-format", "-style", "file", "-i", file_item],
                            proj_path,
                        )

            log.ok()
        else:
            log.error("No C++ files found to format")

    # format python files
    if check_php_formatter():
        py_dir_list = [
            {
                "path": proj_path,
                "patterns": ["make.py"]
            },
            {
                "path": os.path.join(proj_path, const.DIR_NAME_FILES),
                "patterns": ["*.py"],
            },
        ]

        if py_dir_list:
            log.info("Formating Python files...")

            for dir_item in py_dir_list:
                for pattern_item in dir_item["patterns"]:
                    for file_item in file.find_files(dir_item["path"],
                                                     pattern_item):
                        log.info('Formatting file "{0}"...'.format(
                            os.path.relpath(file_item)))

                        runner.run(["black", "-q", file_item], proj_path)

            log.ok()
        else:
            log.error("No Python files found to format")
def run(params):
    """Run "conan install" for every iOS arch/build type of the target."""
    proj_path = params["proj_path"]
    target_name = params["target_name"]

    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]

    if not archs or len(archs) == 0:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
    else:
        # conan recipe shared by every combination
        recipe_path = os.path.join(
            proj_path,
            const.DIR_NAME_FILES,
            const.DIR_NAME_FILES_TARGETS,
            target_name,
            const.DIR_NAME_FILES_TARGET_CONAN,
            const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
            const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
        )

        for arch in archs:
            for build_type in build_types:
                log.info("Building for: {0}/{1}...".format(
                    arch["conan_arch"], build_type))

                # recreate the conan install folder for this combination
                install_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch["group"],
                    arch["conan_arch"],
                    const.DIR_NAME_BUILD_CONAN,
                )
                file.remove_dir(install_dir)
                file.create_dir(install_dir)

                run_args = [
                    "conan",
                    "install",
                    recipe_path,
                    "--profile",
                    arch["conan_profile"],
                    "-s",
                    "arch={0}".format(arch["conan_arch"]),
                    "-s",
                    "build_type={0}".format(build_type),
                    "-s",
                    "os.version={0}".format(arch["min_version"]),
                    "-o",
                    "ezored_arch={0}".format(arch["conan_arch"]),
                    "-o",
                    "ezored_name={0}".format(target_config["project_name"]),
                    "-o",
                    "ezored_version={0}".format(target_config["version"]),
                    "-o",
                    "ezored_group={0}".format(arch["group"]),
                    # darwin toolchain flags default to None when absent
                    "-o",
                    "darwin-toolchain:enable_bitcode={0}".format(
                        arch.get("enable_bitcode")),
                    "-o",
                    "darwin-toolchain:enable_arc={0}".format(
                        arch.get("enable_arc")),
                    "-o",
                    "darwin-toolchain:enable_visibility={0}".format(
                        arch.get("enable_visibility")),
                    "--build=missing",
                    "--update",
                ]

                runner.run(run_args, install_dir)

        log.ok()
def run(params):
    # Assemble an Android AAR per build type: copy the Gradle library
    # template, collect glue code and implementation java sources plus the
    # compiled native libraries for every arch, run the Gradle bundle task
    # and copy the resulting AAR into the dist folder.
    proj_path = params["proj_path"]
    target_name = params["target_name"]
    target_config = config.run(proj_path, target_name, params)
    archs = target_config["archs"]
    build_types = target_config["build_types"]
    # Gradle module name inside the template project
    module_name = "library"

    log.info("Creating AAR library...")

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info("Creating AAR library for: {0}...".format(build_type))

                build_dir = os.path.join(
                    proj_path, const.DIR_NAME_BUILD, target_name, build_type
                )

                # copy library project template
                android_library_build_dir = os.path.join(build_dir, "aar")

                file.remove_dir(android_library_build_dir)
                file.create_dir(android_library_build_dir)

                android_project_dir = os.path.join(
                    proj_path,
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    const.DIR_NAME_FILES_TARGET_SUPPORT,
                    "android-aar-project",
                )

                file.copy_dir(
                    android_project_dir, android_library_build_dir, symlinks=True
                )

                # replace data (version placeholders in the gradle script)
                build_gradle_file = os.path.join(
                    android_library_build_dir,
                    "library",
                    "build.gradle",
                )

                file.replace_in_file(
                    build_gradle_file, "{VERSION}", target_config["version"]
                )

                file.replace_in_file(
                    build_gradle_file, "{VERSION_CODE}", target_config["version_code"]
                )

                # copy glue code support lib files
                gluecode_support_lib_dir = os.path.join(
                    proj_path, const.DIR_NAME_FILES, "gluecode", "support-lib"
                )

                file.copy_all_inside(
                    os.path.join(gluecode_support_lib_dir, "java"),
                    os.path.join(
                        android_library_build_dir, module_name, "src", "main", "java"
                    ),
                )

                # copy all modules glue code files
                modules_dir = os.path.join(proj_path, const.DIR_NAME_FILES, "gluecode")
                modules = file.find_dirs_simple(modules_dir, "*")

                for module in modules:
                    module_dir_name = os.path.basename(module)

                    # "support-lib" was already copied above
                    if module_dir_name == "support-lib":
                        continue

                    module_dir = os.path.join(
                        modules_dir, module_dir_name, "generated-src", "java"
                    )

                    if file.dir_exists(module_dir):
                        file.copy_all_inside(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                module_name,
                                "src",
                                "main",
                                "java",
                            ),
                        )

                # copy all modules implementation files
                modules_dir = os.path.join(
                    proj_path, const.DIR_NAME_FILES, const.DIR_NAME_FILES_SRC
                )
                modules = file.find_dirs_simple(modules_dir, "*")

                for module in modules:
                    module_dir_name = os.path.basename(module)
                    module_dir = os.path.join(modules_dir, module_dir_name, "java")

                    if file.dir_exists(module_dir):
                        file.copy_all_inside(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                module_name,
                                "src",
                                "main",
                                "java",
                            ),
                        )

                # copy all native libraries into jniLibs/<abi>
                for arch in archs:
                    compiled_arch_dir = os.path.join(
                        build_dir,
                        arch["conan_arch"],
                        const.DIR_NAME_BUILD_TARGET,
                        "lib",
                    )

                    target_arch_dir = os.path.join(
                        android_library_build_dir,
                        "library",
                        "src",
                        "main",
                        "jniLibs",
                        arch["arch"],
                    )

                    file.copy_all_inside(compiled_arch_dir, target_arch_dir)

                # build aar
                android_module_dir = os.path.join(
                    android_library_build_dir, module_name
                )

                # NOTE(review): "bundle{0}Aar" presumably expects a
                # capitalized build type (e.g. "Release") — confirm the
                # values used in the target config
                if util.is_windows_platform():
                    run_args = [
                        os.path.join("..", "gradlew.bat"),
                        "bundle{0}Aar".format(build_type),
                    ]
                else:
                    run_args = [
                        os.path.join("..", "gradlew"),
                        "bundle{0}Aar".format(build_type),
                    ]

                runner.run(run_args, android_module_dir)

                # copy files (the generated AAR) into the dist folder
                arr_dir = os.path.join(
                    android_library_build_dir, module_name, "build", "outputs", "aar"
                )

                dist_dir = os.path.join(
                    proj_path, const.DIR_NAME_DIST, target_name, build_type
                )

                file.remove_dir(dist_dir)
                file.copy_all_inside(arr_dir, dist_dir)

            log.ok()
        else:
            log.info(
                'Build type list for "{0}" is invalid or empty'.format(target_name)
            )
    else:
        log.info('Arch list for "{0}" is invalid or empty'.format(target_name))