import os

# "fn" (helper functions), "log" (logging helpers) and "const" (path/name
# constants) are project modules; their exact import paths are assumed to be
# set up in each of the verb scripts below.


def run(params={}):
    log.info('Cleaning...')

    fn.remove_dir(os.path.join(fn.root_dir(), 'build'))

    fn.purge_files(fn.root_dir(), '*.pyc')
    fn.purge_files(fn.root_dir(), 'Thumbs.db')
    fn.purge_files(fn.root_dir(), '.DS_Store')
    fn.purge_dirs(fn.root_dir(), '__pycache__')

    log.ok('')

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info('Building for: {0}/{1}...'.format(
                    arch['conan_arch'], build_type))

                # conan install
                build_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch['conan_arch'],
                    const.DIR_NAME_BUILD_CONAN,
                )

                fn.remove_dir(build_dir)
                fn.create_dir(build_dir)

                run_args = [
                    'conan',
                    'install',
                    os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    '--profile', '{0}'.format(target_config['conan_profile']),
                    '-s', 'arch={0}'.format(arch['conan_arch']),
                    '-s', 'build_type={0}'.format(build_type),
                    '--build=missing',
                    '--update',
                ]

                fn.run_simple(run_args, build_dir)
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))

def install_profiles(params={}):
    log.info('Copying files...')

    targets = fn.get_all_targets()

    if targets:
        for target in targets:
            files = fn.find_files(
                os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target,
                    const.DIR_NAME_FILES_TARGET_CONAN,
                    const.DIR_NAME_FILES_TARGET_CONAN_PROFILES),
                '*profile')

            if files:
                conan_profile_dir = os.path.join(
                    fn.home_dir(),
                    const.DIR_NAME_HOME_CONAN,
                    const.DIR_NAME_HOME_CONAN_PROFILES)

                for item in files:
                    filename = os.path.basename(item)

                    log.info('Copying profile "{0}"...'.format(filename))

                    fn.copy_file(
                        item, os.path.join(conan_profile_dir, filename))

    log.ok()
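
# The files copied above are plain Conan profile files that end up in the
# user's Conan profiles folder. The commented snippet below is only an
# illustrative sketch of what such a profile might contain; the concrete
# settings are target-specific and not part of this repository's code:
#
#   [settings]
#   os=Android
#   arch=armv8
#   compiler=clang
#   compiler.version=8
#   compiler.libcxx=libc++
#   build_type=Release
#
#   [options]
#
#   [env]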

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']

    log.info('Packaging...')

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info('Copying for: {0}/{1}...'.format(
                    arch['conan_arch'], build_type))

                # create folders
                dist_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    arch['conan_arch'],
                )

                fn.remove_dir(dist_dir)
                fn.create_dir(dist_dir)

                build_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch['conan_arch'],
                    const.DIR_NAME_BUILD_TARGET,
                    'bin',
                )

                # copy files
                fn.copy_all_inside(build_dir, dist_dir)
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))

def generate(params={}):
    dirs = fn.find_dirs_simple(
        os.path.join(
            fn.root_dir(),
            const.DIR_NAME_FILES,
            const.DIR_NAME_DJINNI),
        '*')

    if dirs:
        log.info('Generating files for all modules...')

        dirs.sort()

        for item in dirs:
            if fn.file_exists(os.path.join(item, 'generate.py')):
                dir_name = os.path.basename(item)

                log.info(
                    'Generating djinni files for "{0}"...'.format(dir_name))

                fn.run_simple(['python', 'generate.py'], item)

        log.ok()
    else:
        log.error('No djinni modules to generate')

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']

    log.info('Packaging universal framework...')

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info('Copying for: {0}...'.format(build_type))

                # copy first arch's framework as the base
                framework_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    archs[0]['conan_arch'],
                    const.DIR_NAME_BUILD_TARGET,
                    'lib',
                    '{0}.framework'.format(target_config['project_name']),
                )

                dist_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    '{0}.framework'.format(target_config['project_name']),
                )

                fn.remove_dir(dist_dir)
                fn.copy_dir(framework_dir, dist_dir)

                # lipo: merge every arch's binary into one universal binary
                lipo_archs_args = []

                for arch in archs:
                    lipo_archs_args.append(os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch['conan_arch'],
                        const.DIR_NAME_BUILD_TARGET,
                        'lib',
                        '{0}.framework'.format(target_config['project_name']),
                        target_config['project_name'],
                    ))

                lipo_args = [
                    'lipo',
                    '-create',
                    '-output',
                    os.path.join(
                        dist_dir,
                        target_config['project_name'],
                    ),
                ]

                lipo_args.extend(lipo_archs_args)

                fn.run_simple(lipo_args, fn.root_dir())

                # check file
                log.info('Checking file for: {0}...'.format(build_type))

                fn.run_simple([
                    'file',
                    os.path.join(
                        dist_dir,
                        target_config['project_name'],
                    ),
                ], fn.root_dir())
        else:
            log.error('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(
            target_name))

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']
    module_name = 'library'

    log.info('Creating AAR library...')

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info(
                    'Creating AAR library for: {0}...'.format(build_type))

                build_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                )

                android_library_build_dir = os.path.join(
                    build_dir,
                    'aar',
                )

                fn.remove_dir(android_library_build_dir)
                fn.create_dir(android_library_build_dir)

                android_project_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_TARGETS,
                    target_name,
                    const.DIR_NAME_FILES_TARGET_SUPPORT,
                    'android-aar-project',
                )

                fn.copy_dir(android_project_dir, android_library_build_dir)

                # copy djinni support lib files
                djinni_support_lib_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_FILES,
                    'djinni',
                    'support-lib',
                )

                fn.copy_all_inside(
                    os.path.join(djinni_support_lib_dir, 'java'),
                    os.path.join(
                        android_library_build_dir,
                        module_name,
                        'src',
                        'main',
                        'java',
                    ),
                )

                # copy all modules djinni files
                modules_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_FILES,
                    'djinni',
                )

                modules = fn.find_dirs_simple(modules_dir, '*')

                for module in modules:
                    module_dir_name = os.path.basename(module)

                    if module_dir_name == 'support-lib':
                        continue

                    module_dir = os.path.join(
                        modules_dir,
                        module_dir_name,
                        'generated-src',
                        'java',
                    )

                    if fn.dir_exists(module_dir):
                        fn.copy_all_inside(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                module_name,
                                'src',
                                'main',
                                'java',
                            ),
                        )

                # copy all modules implementation files
                modules_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_FILES,
                    const.DIR_NAME_FILES_SRC,
                )

                modules = fn.find_dirs_simple(modules_dir, '*')

                for module in modules:
                    module_dir_name = os.path.basename(module)

                    module_dir = os.path.join(
                        modules_dir,
                        module_dir_name,
                        'java',
                    )

                    if fn.dir_exists(module_dir):
                        fn.copy_all_inside(
                            module_dir,
                            os.path.join(
                                android_library_build_dir,
                                module_name,
                                'src',
                                'main',
                                'java',
                            ),
                        )

                # copy all native libraries
                for arch in archs:
                    compiled_arch_dir = os.path.join(
                        build_dir,
                        arch['conan_arch'],
                        const.DIR_NAME_BUILD_TARGET,
                        'lib',
                    )

                    target_arch_dir = os.path.join(
                        android_library_build_dir,
                        module_name,
                        'src',
                        'main',
                        'jniLibs',
                        arch['arch'],
                    )

                    fn.copy_all_inside(
                        compiled_arch_dir,
                        target_arch_dir,
                    )

                # build aar
                android_module_dir = os.path.join(
                    android_library_build_dir,
                    module_name,
                )

                run_args = [
                    '../gradlew',
                    'bundle{0}Aar'.format(build_type),
                ]

                fn.run_simple(
                    run_args,
                    android_module_dir,
                )

                # copy files
                aar_dir = os.path.join(
                    android_library_build_dir,
                    module_name,
                    'build',
                    'outputs',
                    'aar',
                )

                dist_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                )

                fn.remove_dir(dist_dir)

                fn.copy_all_inside(
                    aar_dir,
                    dist_dir,
                )
        else:
            log.error('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(
            target_name))

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info('Building for: {0}/{1}...'.format(
                    arch['conan_arch'], build_type))

                # conan install
                build_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch['conan_arch'],
                    const.DIR_NAME_BUILD_CONAN,
                )

                fn.remove_dir(build_dir)
                fn.create_dir(build_dir)

                run_args = [
                    'conan',
                    'install',
                    os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    '--profile', '{0}'.format(target_config['conan_profile']),
                    '-s', 'arch={0}'.format(arch['conan_arch']),
                    '-s', 'build_type={0}'.format(build_type),
                    '-o', '{0}:ios_arch={1}'.format(
                        target_name, arch['arch']),
                    '-o', '{0}:ios_platform={1}'.format(
                        target_name, arch['platform']),
                    '-o', '{0}:ios_deployment_target={1}'.format(
                        target_name, target_config['min_version']),
                    '-o', '{0}:enable_bitcode={1}'.format(
                        target_name,
                        ('1' if target_config['bitcode'] else '0')),
                    '-o', '{0}:enable_arc={1}'.format(
                        target_name,
                        ('1' if target_config['enable_arc'] else '0')),
                    '-o', '{0}:enable_visibility={1}'.format(
                        target_name,
                        ('1' if target_config['enable_visibility'] else '0')),
                    '-o', '{0}:cmake_toolchain_file={1}'.format(
                        target_name,
                        os.path.join(
                            fn.root_dir(),
                            const.DIR_NAME_FILES,
                            const.DIR_NAME_FILES_CMAKE,
                            'toolchains',
                            'ios.cmake',
                        )),
                    '-o', 'darwin-toolchain:bitcode={0}'.format(
                        'True' if target_config['bitcode'] else 'False'),
                    '--build=missing',
                    '--update',
                ]

                fn.run_simple(run_args, build_dir)
    else:
        log.error(
            'Arch list for "{0}" is invalid or empty'.format(target_name))
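
# The verbs above and below read the target configuration through
# fn.get_target_config(). As an illustration only, an iOS target
# configuration is assumed to look roughly like the dictionary below.
# The keys are taken from how the code uses the configuration; every
# concrete value here is a placeholder, not part of the repository.
EXAMPLE_IOS_TARGET_CONFIG = {
    'project_name': 'example',              # placeholder project name
    'conan_profile': 'ios-profile',         # hypothetical profile name
    'min_version': '9.0',
    'bitcode': True,
    'enable_arc': True,
    'enable_visibility': False,
    'build_types': ['Debug', 'Release'],
    'archs': [
        # 'arch' is the Apple architecture name, 'conan_arch' the Conan one,
        # 'platform' the toolchain platform; values here are examples only
        {'arch': 'arm64', 'conan_arch': 'armv8', 'platform': 'OS'},
        {'arch': 'x86_64', 'conan_arch': 'x86_64', 'platform': 'SIMULATOR64'},
    ],
    'install_headers': [
        # hypothetical header entry; only the 'dir' type is handled
        {'type': 'dir', 'path': 'files/src/example/include'},
    ],
}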

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']
    install_headers = target_config['install_headers']

    if archs and len(archs) > 0:
        for arch in archs:
            for build_type in build_types:
                log.info('Building for: {0}/{1}...'.format(
                    arch['conan_arch'], build_type))

                # conan build
                build_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    arch['conan_arch'],
                    const.DIR_NAME_BUILD_TARGET,
                )

                fn.remove_dir(build_dir)
                fn.create_dir(build_dir)

                run_args = [
                    'conan',
                    'build',
                    os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CONAN,
                        const.DIR_NAME_FILES_TARGET_CONAN_RECIPE,
                        const.FILE_NAME_FILES_TARGET_CONAN_RECIPE_CONANFILE_PY,
                    ),
                    '--source-folder', os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_CMAKE,
                    ),
                    '--build-folder', os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch['conan_arch'],
                        const.DIR_NAME_BUILD_TARGET,
                    ),
                    '--install-folder', os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_BUILD,
                        target_name,
                        build_type,
                        arch['conan_arch'],
                        const.DIR_NAME_BUILD_CONAN,
                    ),
                ]

                fn.run_simple(
                    run_args,
                    build_dir,
                )

                # headers
                dist_headers_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_BUILD,
                    target_name,
                    build_type,
                    archs[0]['conan_arch'],
                    const.DIR_NAME_BUILD_TARGET,
                    'lib',
                    '{0}.framework'.format(target_config['project_name']),
                    'Headers',
                )

                fn.create_dir(dist_headers_dir)

                if install_headers:
                    for header in install_headers:
                        source_header_dir = os.path.join(
                            fn.root_dir(),
                            header['path'],
                        )

                        if header['type'] == 'dir':
                            fn.copy_all_inside(
                                source_header_dir,
                                dist_headers_dir,
                            )
                        else:
                            log.error(
                                'Invalid type for install header list '
                                'for {0}'.format(target_name))
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(
            target_name))

def run(params={}):
    target_name = params['target_name']
    target_config = fn.get_target_config(target_name)

    archs = target_config['archs']
    build_types = target_config['build_types']

    log.info('Generating bridging header...')

    if archs and len(archs) > 0:
        if build_types and len(build_types) > 0:
            for build_type in build_types:
                log.info('Generating for: {0}...'.format(build_type))

                dist_headers_dir = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    '{0}.framework'.format(target_config['project_name']),
                    'Headers',
                )

                header_files = fn.find_files(dist_headers_dir, '*.h')

                bridge_file = os.path.join(
                    fn.root_dir(),
                    const.DIR_NAME_DIST,
                    target_name,
                    build_type,
                    'Bridging-Header.h',
                )

                content = ''

                for header_file in header_files:
                    header_file_name = os.path.basename(header_file)

                    if not validate_header_file_name(header_file_name):
                        continue

                    header_file = header_file.replace(
                        dist_headers_dir + '/', '')

                    content = content + '#include "{0}"\n'.format(
                        header_file)

                if len(content) > 0:
                    fn.remove_file(bridge_file)

                    fn.write_to_file(
                        os.path.join(
                            fn.root_dir(),
                            const.DIR_NAME_DIST,
                            target_name,
                            build_type,
                        ),
                        'Bridging-Header.h',
                        content)
                else:
                    log.error(
                        'File not generated because the framework '
                        'headers folder is empty')
        else:
            log.error('Build type list for "{0}" is invalid or empty'.format(
                target_name))
    else:
        log.error('Arch list for "{0}" is invalid or empty'.format(
            target_name))
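
# validate_header_file_name() is called above but its definition is not part
# of this section. A minimal sketch, assuming the intent is simply to skip
# headers that should not be re-included from the generated bridging header;
# the skip list below is an assumption, not the project's actual rule.
def validate_header_file_name(header_file_name):
    # hypothetical skip list: never include the bridging header itself
    skip_list = ['Bridging-Header.h']

    return header_file_name not in skip_list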

def run(params={}):
    args = params['args']
    targets = fn.get_all_targets()

    show_target_list = False

    if len(args) > 0:
        target_name = args[0]
        args.pop(0)

        if target_name in targets:
            target_verbs = fn.get_all_target_verbs(target_name)
            target_verbs = list(
                fn.filter_list(target_verbs, const.TARGET_VERBS_INTERNAL))

            show_target_verb_list = False

            if len(args) > 0:
                verb_name = args[0]

                if verb_name in target_verbs:
                    log.info('Running "{0}" on target "{1}"...'.format(
                        verb_name, target_name))

                    target_verb_folder = os.path.join(
                        fn.root_dir(),
                        const.DIR_NAME_FILES,
                        const.DIR_NAME_FILES_TARGETS,
                        target_name,
                        const.DIR_NAME_FILES_TARGET_VERBS,
                    )

                    params = {
                        'target_name': target_name,
                        'args': args,
                    }

                    fn.exec_external(
                        path=target_verb_folder,
                        module_name=verb_name,
                        command_name='run',
                        command_params=params,
                        show_log=False,
                        show_error_log=True,
                        throw_error=True,
                    )

                    log.ok()
                else:
                    show_target_verb_list = True
            else:
                show_target_verb_list = True

            if show_target_verb_list:
                if target_verbs and len(target_verbs) > 0:
                    log.colored('List of available target verbs:\n',
                                log.PURPLE)

                    for target_verb in target_verbs:
                        log.normal(' - {0}'.format(target_verb))
                else:
                    log.error('No target verbs available')
        else:
            show_target_list = True
    else:
        show_target_list = True

    if show_target_list:
        help(params)
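
# help() is called above when no valid target or verb is given, but its
# definition is not part of this section. A minimal sketch, assuming it only
# prints the available targets in the same style used for target verbs; the
# exact output format is an assumption.
def help(params={}):
    targets = fn.get_all_targets()

    if targets and len(targets) > 0:
        log.colored('List of available targets:\n', log.PURPLE)

        for target in targets:
            log.normal(' - {0}'.format(target))
    else:
        log.error('No targets available')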