def process_subtemplates(p_page_name_str, p_build_dir_str, p_page_info_map, p_log_fun):
    """Copy a page's HTML subtemplate files into that page's build dir.

    Targets {p_build_dir_str}/templates/{p_page_name_str}/subtemplates, creating
    the dir if missing. Every entry in p_page_info_map["subtemplates_lst"] must
    be an existing .html file path.
    """
    assert isinstance(p_page_name_str, str)
    assert os.path.isdir(p_build_dir_str)
    assert "subtemplates_lst" in p_page_info_map.keys()

    print("")
    p_log_fun("INFO",
        "%s------------ SUBTEMPLATES --------------------------------%s" % (fg("yellow"), attr(0)))
    print("")

    subtmpl_paths_lst = p_page_info_map["subtemplates_lst"]
    assert isinstance(subtmpl_paths_lst, list)

    # SUBTEMPLATES__BUILD_DIR - create the target dir on first use
    build_subdir_str = f"{p_build_dir_str}/templates/{p_page_name_str}/subtemplates"
    if not os.path.isdir(build_subdir_str):
        gf_core_cli.run(f"mkdir -p {build_subdir_str}")

    # SUBTEMPLATE__COPY - validate and copy each subtemplate file
    for path_str in subtmpl_paths_lst:
        print(path_str)
        assert isinstance(path_str, str)
        assert os.path.isfile(path_str)
        assert path_str.endswith(".html")
        gf_core_cli.run(f"cp {path_str} {build_subdir_str}")
def process_files_to_copy(p_page_info_map, p_log_fun):
    """Copy each (src_file, target_dir) pair listed in
    p_page_info_map["files_to_copy_lst"], creating target dirs as needed,
    and verify each file actually landed in its target dir."""
    assert isinstance(p_page_info_map, dict)

    print("")
    p_log_fun("INFO",
        "%s------------ COPY_FILES --------------------------------%s" % (fg("yellow"), attr(0)))
    print("")

    pairs_lst = p_page_info_map["files_to_copy_lst"]
    assert isinstance(pairs_lst, list)

    # COPY_FILES
    for pair_tpl in pairs_lst:
        assert isinstance(pair_tpl, tuple)
        src_file_str, target_dir_str = pair_tpl
        assert os.path.isfile(src_file_str)

        if not os.path.isdir(target_dir_str):
            gf_core_cli.run(f'mkdir -p {target_dir_str}')

        gf_core_cli.run(f"cp {src_file_str} {target_dir_str}")

        # verify the copy produced the expected file
        copied_path_str = f"{target_dir_str}/{os.path.basename(src_file_str)}"
        assert os.path.isfile(copied_path_str)
def copy_files(p_copy_to_dir_lst):
    """Copy each (src_file, target_dir) pair, creating missing target dirs first."""
    assert isinstance(p_copy_to_dir_lst, list)

    print("")
    print(" COPY FILES")

    for src_f_str, target_dir_str in p_copy_to_dir_lst:
        if not os.path.isdir(target_dir_str):
            gf_core_cli.run(f"mkdir -p {target_dir_str}")
        gf_core_cli.run(f"cp {src_f_str} {target_dir_str}")
def push(p_image_full_name_str,
    p_docker_user_str,
    p_docker_pass_str,
    p_log_fun,
    p_host_str=None,
    p_exit_on_fail_bool=False,
    p_docker_sudo_bool=False):
    """Push a Docker image to a registry: login, "docker push", then "docker logout".

    p_host_str          - registry host passed through to login() (None for the default).
    p_exit_on_fail_bool - exit the process with the push's exit code on failure
                          (used in CI so a failed push fails the build).
    p_docker_sudo_bool  - prefix docker commands with "sudo".
    """
    p_log_fun("FUN_ENTER", "gf_os_docker.push()")
    p_log_fun("INFO", f"image_full_name - {p_image_full_name_str}")
    assert isinstance(p_docker_user_str, str)

    #------------------
    # LOGIN
    login(p_docker_user_str,
        p_docker_pass_str,
        p_host_str=p_host_str,
        p_exit_on_fail_bool=p_exit_on_fail_bool,
        p_docker_sudo_bool=p_docker_sudo_bool)

    #------------------
    # assemble the "docker push" command
    cmd_lst = []
    if p_docker_sudo_bool:
        cmd_lst.append("sudo")
    cmd_lst.extend(["docker push", p_image_full_name_str])

    c_str = " ".join(cmd_lst)
    p_log_fun("INFO", " - %s" % (c_str))

    stdout_str, stderr_str, exit_code_int = gf_core_cli.run(c_str)

    if not stderr_str == "":
        print(stderr_str)

        # IMPORTANT!! - failure to reach the Docker daemon should always exit; it's not an expected failure.
        if "Cannot connect to the Docker daemon" in stderr_str:
            exit(1)

    # IMPORTANT!! - if the command returns a non-zero exit code, in some environments (CI) we
    #               want to fail with that non-zero exit code - this way CI will flag builds as failed.
    #               in other scenarios it's acceptable for this command to fail, and we want the caller
    #               to keep executing.
    if not exit_code_int == 0:
        if p_exit_on_fail_bool:
            exit(exit_code_int)

    #------------------
    # DOCKER_LOGOUT - always log out after the push attempt
    cmd_lst = []
    if p_docker_sudo_bool:
        cmd_lst.append("sudo")
    cmd_lst.append("docker logout")

    stdout_str, _, _ = gf_core_cli.run(" ".join(cmd_lst))
    print(stdout_str)
def publish(p_cont_image_name_str, p_docker_user_str, p_docker_pass_str, p_docker_sudo_bool=False):
    """Log in to the container registry and "docker push" the given image.

    Exits the process with code 1 if the push fails.
    """
    print("PUBLISHING CONTAINER -----------=========================")
    print(f"container image name - {p_cont_image_name_str}")

    # LOGIN
    docker_login(p_docker_user_str, p_docker_pass_str, p_docker_sudo_bool=p_docker_sudo_bool)

    #------------------------
    cmd_parts_lst = ["sudo"] if p_docker_sudo_bool else []
    cmd_parts_lst.append(f"docker push {p_cont_image_name_str}")

    cmd_str = " ".join(cmd_parts_lst)
    print(cmd_str)

    _, _, exit_code_int = gf_core_cli.run(cmd_str)
    if exit_code_int != 0:
        exit(1)
def build(p_cont_image_name_str, p_dockerfile_path_str, p_docker_sudo_bool=False):
    """Build a container image from the given Dockerfile.

    The docker build context is two levels above this module's dir (the repo root).
    Exits the process with code 1 on a failed build.
    """
    docker_context_dir_str = f"{modd_str}/../.."

    print("BUILDING CONTAINER -----------=========================")
    print(f"container image name - {p_cont_image_name_str}")
    print(f"dockerfile - {p_dockerfile_path_str}")
    assert os.path.isfile(p_dockerfile_path_str)

    cmd_parts_lst = ["sudo"] if p_docker_sudo_bool else []
    cmd_parts_lst += [
        "docker build",
        f"-f {p_dockerfile_path_str}",
        f"--tag={p_cont_image_name_str}",
        docker_context_dir_str,
    ]

    cmd_str = " ".join(cmd_parts_lst)
    print(cmd_str)

    _, _, exit_code_int = gf_core_cli.run(cmd_str)
    if exit_code_int != 0:
        exit(1)
def cont_is_running(p_cont_name_str, p_log_fun, p_exit_on_fail_bool=True, p_docker_sudo_bool=True):
    """Return True if a container matching p_cont_name_str appears in "docker ps -a".

    NOTE: p_log_fun and p_exit_on_fail_bool are accepted for signature parity with
    the other docker helpers but are not used in this function.
    """
    sudo_str = "sudo" if p_docker_sudo_bool else ""

    stdout_str, stderr_str, _ = gf_core_cli.run(
        "%s docker ps -a | grep %s" % (sudo_str, p_cont_name_str))

    if stderr_str != "":
        print(stderr_str)

        # IMPORTANT!! - failure to reach the Docker daemon is never expected; always exit.
        if "Cannot connect to the Docker daemon" in stderr_str:
            exit(1)

    # grep produced no matching line - the container is not present
    if stdout_str == "":
        print("CONTAINER NOT RUNNING -----------------------")
        return False

    print("CONTAINER RUNNING -----------------------")
    return True
def remove_by_name(p_container_name_str, p_log_fun, p_exit_on_fail_bool=False, p_docker_sudo_bool=True):
    """Force-remove all containers whose "docker ps -a" line matches p_container_name_str.

    NOTE: p_log_fun is accepted for signature parity with the other docker helpers
    but is not used in this function.
    """
    sudo_str = "sudo" if p_docker_sudo_bool else ""

    # grep the container listing for the name and feed the matching IDs to "docker rm -f"
    cmd_str = "%s docker rm -f `%s docker ps -a | grep %s | awk '{print $1}'`" % (
        sudo_str, sudo_str, p_container_name_str)

    _, stderr_str, exit_code_int = gf_core_cli.run(cmd_str)

    if stderr_str != "":
        print(stderr_str)

        # IMPORTANT!! - failure to reach the Docker daemon is never expected; always exit.
        if "Cannot connect to the Docker daemon" in stderr_str:
            exit(1)

    # IMPORTANT!! - in CI a non-zero exit code should fail the build with that same
    #               code; in other scenarios the caller is allowed to keep executing.
    if exit_code_int != 0 and p_exit_on_fail_bool:
        exit(exit_code_int)
def prepare_libs__extern(p_target_lib_dir_str, p_tf_libs_bool=False, p_exit_on_fail_bool=True):
    """Download external native libs into the target lib dir.

    Currently only TensorFlow: fetches the prebuilt libtensorflow CPU tarball and
    unpacks it into {p_target_lib_dir_str}/tf_lib, so build servers get fresh TF
    libs without the repo having to vendor them.
    """
    #-------------
    # TENSORFLOW
    if p_tf_libs_bool:
        print(f"{fg('green')}prepare TensorFlow lib{attr(0)}")

        archive_name_str = "tflib.tar.gz"
        tf_version_str   = "1.15.0"
        tf_filename_str  = f"libtensorflow-cpu-linux-x86_64-{tf_version_str}.tar.gz"
        tf_url_str       = f"https://storage.googleapis.com/tensorflow/libtensorflow/{tf_filename_str}"

        gf_core_cli.run(f"mkdir -p {p_target_lib_dir_str}/tf_lib")

        # DOWNLOAD
        _, _, exit_code_int = gf_core_cli.run(f"curl {tf_url_str} --output {archive_name_str}")
        if exit_code_int != 0 and p_exit_on_fail_bool:
            exit(exit_code_int)

        # UNPACK - move the archive into the target dir and extract it there
        gf_core_cli.run(f"mv {archive_name_str} {p_target_lib_dir_str}/{archive_name_str}")
        gf_core_cli.run(f"tar -xvzf {p_target_lib_dir_str}/{archive_name_str} -C {p_target_lib_dir_str}/tf_lib")
def get_libs_for_linking():
    """Return an "LD_LIBRARY_PATH=..." string pointing at the Rust build output
    dirs (rust/build and rust/build/tf_lib/lib) for dynamic linking."""
    # RUST_DYNAMIC_LIBS - compiled Rust libs plus the unpacked TensorFlow libs
    rust_build_dir_str = os.path.abspath(f"{modd_str}/../../rust/build")
    tf_libs_dir_str    = os.path.abspath(f"{modd_str}/../../rust/build/tf_lib/lib")

    print(f"dynamic libs dir - {fg('green')}{rust_build_dir_str}{attr(0)}")
    gf_core_cli.run(f"ls -al {rust_build_dir_str}")

    return f"LD_LIBRARY_PATH={':'.join([rust_build_dir_str, tf_libs_dir_str])}"
def run(p_cont_image_name_str, p_docker_ports_lst=None, p_docker_sudo_bool=False):
    """Run a container image with "docker run", publishing the given ports.

    p_docker_ports_lst - list of (host_port, container_port) pairs; each becomes
                         a "-p host:container" flag. Defaults to no ports.
    Exits the process with code 1 if "docker run" fails.
    """
    # FIX - the default used to be a mutable [] (shared across calls); use the
    #       None-sentinel idiom instead. Backward compatible: callers passing a
    #       list (or nothing) behave exactly as before.
    if p_docker_ports_lst is None:
        p_docker_ports_lst = []

    c_lst = []
    if p_docker_sudo_bool:
        c_lst.append("sudo")

    ports_str = ' '.join([f'-p {p}:{p2}' for p, p2 in p_docker_ports_lst])
    c_lst.extend([f"docker run {ports_str} {p_cont_image_name_str}"])

    c_str = " ".join(c_lst)
    print(c_str)

    _, _, exit_code_int = gf_core_cli.run(c_str)
    if not exit_code_int == 0:
        exit(1)
def prepare_web_files(p_pages_map,
    p_service_base_dir_str,
    p_log_fun,
    p_docker_sudo_bool = False):
    """Copy each page's built web files into the service's static/ dir, then move
    templates out of static/ so they cannot be served over HTTP.

    p_pages_map            - page_name -> page_info dict; each info dict must contain
                             "build_dir_str" pointing at that page's build output dir.
    p_service_base_dir_str - service base dir; files land in <base>/static.
    p_docker_sudo_bool     - accepted for signature parity with other helpers; not used here.
    """
    p_log_fun("FUN_ENTER", "gf_containers.prepare_web_files()")
    assert isinstance(p_pages_map, dict)
    # NOTE(review): os.path.dirname() returns a string, so this assert only fails
    #               on an empty result - presumably os.path.isdir() was intended; confirm.
    assert os.path.dirname(p_service_base_dir_str)

    for pg_name_str, pg_info_map in p_pages_map.items():
        print(f"======== {fg('green')}{'%s'%(pg_name_str)}{attr(0)}")
        assert isinstance(pg_info_map, dict)
        assert "build_dir_str" in pg_info_map.keys()
        assert os.path.isdir(pg_info_map["build_dir_str"])

        build_dir_str = os.path.abspath(pg_info_map["build_dir_str"])

        #------------------
        # CREATE_TARGET_DIR
        target_dir_str = os.path.abspath(f"{p_service_base_dir_str}/static")
        gf_core_cli.run(f"mkdir -p {target_dir_str}")

        #------------------
        # COPY_PAGE_WEB_CODE - everything the page build produced goes into static/
        gf_core_cli.run(f"cp -r {build_dir_str}/* {target_dir_str}")

        #------------------
        # MOVE_TEMPLATES_OUT_OF_STATIC
        # IMPORTANT!! - templates should not be in the static/ dir, which would make them servable
        #               over HTTP which we dont want. instead they are moved out of the static/ dir
        #               to its parent dir where they are private.
        #               templates are originally in the static/ dir because during the build process they were
        #               handled together with other static content (html/css/js files) and as output moved
        #               into that static/ dir from other locations while in development.
        # NOTE - order matters: the old templates dir is removed before the fresh one is moved in.
        gf_core_cli.run("rm -rf %s/../templates"%(target_dir_str)) # remove existing templates build dir
        gf_core_cli.run("mv %s/templates %s/.."%(target_dir_str, target_dir_str))
def process_css():
    """Process every <link type="text/css"> tag in the page's soup: compile .scss
    files via sass (or copy plain .css) into {build_dir}/css/{page_name}, and
    rewrite each tag's href to the url_base-prefixed build path.

    NOTE(review): this is a closure - it reads p_log_fun, soup, p_build_dir_str,
    p_page_name_str, main_html_path_str and url_base_str from the enclosing
    build_page() scope; it is not callable standalone.
    """
    p_log_fun("INFO",
        "%s------------ CSS ---------------------------------------%s" % (fg("yellow"), attr(0)))

    css_links_lst = soup.findAll("link", {"type": "text/css"})

    target_dir_str = f'{p_build_dir_str}/css/{p_page_name_str}'
    gf_core_cli.run(f'mkdir -p {target_dir_str}') # create dir and all parent dirs

    for css in css_links_lst:
        src_str = css["href"]
        assert src_str.endswith('.css') or src_str.endswith('.scss')

        # externally hosted stylesheet - nothing to build or copy
        if src_str.startswith('http://') or src_str.startswith('https://'):
            print('EXTERNAL_URL - DO NOTHING')
            continue

        # full paths are relative to the dir holding the main html file (app entry point)
        full_path_str = os.path.abspath(
            f'{os.path.dirname(main_html_path_str)}/{src_str}')
        print(full_path_str)
        assert os.path.isfile(full_path_str)

        # SASS - compile .scss directly into a .css file in the target dir
        if src_str.endswith('.scss'):
            css_file_name_str = os.path.basename(src_str).replace('.scss', '.css')
            final_src_str = f'{target_dir_str}/{css_file_name_str}'
            gf_core_cli.run(f'sass {full_path_str} {final_src_str}')

            # HTML_MODIFY - change the src in the html tag, to include the url_base
            #               (dont leave relative path)
            css["href"] = f'{url_base_str}/css/{p_page_name_str}/{css_file_name_str}'

        # CSS - plain stylesheet, just copy it
        else:
            gf_core_cli.run(f'cp {full_path_str} {target_dir_str}')

            # HTML_MODIFY - change the src in the html tag, to include the url_base (dont leave relative path)
            css["href"] = f'{url_base_str}/css/{p_page_name_str}/{os.path.basename(full_path_str)}'
def build_typescript(p_out_file_str):
    """Compile main_ts_file_str with tsc into p_out_file_str, then minify the
    output in place.

    NOTE(review): closure - reads main_ts_file_str and p_log_fun from the
    enclosing scope. Exits the process with -1 if tsc fails.
    """
    cmd_lst = [
        "tsc",
        "--module system", # needed with the "--out" option
        "--target es2017",
        # "--target es6",

        # Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports
        # '--esModuleInterop',

        f"--out {p_out_file_str}",
        main_ts_file_str
    ]
    cmd_str = " ".join(cmd_lst)
    print(cmd_str)

    _, _, return_code_int = gf_core_cli.run(cmd_str)
    if return_code_int > 0:
        print("ERROR!! - TypeScript Compilation failed!")
        exit(-1)

    # minify into the same file name as the Typescript compiler output
    target_dir_str = os.path.dirname(p_out_file_str) # NOTE(review): unused - confirm before removing
    minify_js(p_out_file_str, [p_out_file_str], p_log_fun)
def run(p_cargo_crate_dir_path_str,
    p_static_bool=False,
    p_exit_on_fail_bool=True,
    p_verbose_bool=False):
    """Run "cargo build" for the given crate dir.

    p_static_bool       - build a statically-linked artifact via the
                          x86_64-unknown-linux-musl target; otherwise a --release
                          dynamically-linked build.
    p_exit_on_fail_bool - exit the process with cargo's exit code on failure (for CI).
    p_verbose_bool      - pass -vv (very verbose) to cargo.

    Changes into the crate dir for the build and restores the CWD afterwards.
    """
    assert os.path.isdir(p_cargo_crate_dir_path_str)
    print(f"{fg('yellow')}BUILD{attr(0)}")
    print(f"crate dir - {fg('yellow')}{p_cargo_crate_dir_path_str}{attr(0)}")

    cwd_str = os.getcwd()
    os.chdir(os.path.abspath(p_cargo_crate_dir_path_str)) # change into the target main package dir

    #-------------
    # "rustup update stable"
    # _, _, exit_code_int = gf_cli_utils.run_cmd("cargo clean")
    #-------------

    # NOTE - the command is built as a single shell string, so ENV-var prefixes
    #        (RUSTFLAGS=...) must come before "cargo build"; ordering matters.
    c_lst = []

    if p_static_bool:
        # DOCUMENT!! - without this the py extension wont compile,
        #              complaining that the musl target from the gf_images_jobs lib cant be used, since this
        #              py extension package is marked as a dynamic lib (which it has to be to be importable
        #              by the Py VM).
        if os.path.basename(p_cargo_crate_dir_path_str) == "gf_images_jobs_py":
            c_lst.append("RUSTFLAGS='-C target-feature=-crt-static'")

    c_lst.extend([
        # 'RUSTFLAGS="$RUSTFLAGS -A warnings"', # turning off rustc warnings

        # "RUSTFLAGS='-L %s'"%(os.path.abspath("%s/../../rust/gf_images_jobs/test"%(modd_str))),

        # if compiling on Ubuntu for Alpine for example, this ENV var should be set
        # "PKG_CONFIG_ALLOW_CROSS=1",

        "cargo build",
    ])

    if p_verbose_bool:
        # c_lst.append("--verbose")
        c_lst.append("-vv") # very verbose

    # STATIC_LINKING - some outputed libs (imported by Go for example) should contain their
    #                  own versions of libs statically linked into them.
    if p_static_bool:
        #-------------
        # MUSL - staticaly compile a libc-compatible lib into the output binary. without MUSL
        #        rust statically compiles all program libs except the standard lib.
        #        musl-gcc - a wrapper around GCC that uses the musl C standard library
        #        implementation to build programs. It is well suited for being linked with other libraries
        #        into a single static executable with no shared dependencies.
        #        its used by "cargo build" if we target linux-musl.
        #        "sudo apt-get install musl-tools" - make sure its installed
        #
        #        x86_64-unknown-linux-musl - for 64-bit Linux.
        #                                    for this to work "rustup" has to be used to install this
        #                                    build target into the Rust toolchain.
        #                                    (for GF CI this is done in the gf_builder Dockerfile__gf_builder)
        c_lst.append("--target x86_64-unknown-linux-musl")

        #-------------
    # DYNAMIC_LINKING
    else:
        c_lst.append("--release")

    cmd_str = " ".join(c_lst)
    print(cmd_str)
    _, _, exit_code_int = gf_core_cli.run(cmd_str)

    # IMPORTANT!! - if "cargo build" returns a non-zero exit code in some environments (CI) we
    #               want to fail with a non-zero exit code as well - this way other CI
    #               programs will flag builds as failed.
    if not exit_code_int == 0:
        if p_exit_on_fail_bool:
            exit(exit_code_int)

    os.chdir(cwd_str) # return to initial dir
def list_changed_apps(p_apps_changes_deps_map,
    p_commits_lookback_int = 1,
    p_mark_all_bool = False):
    """Determine which apps changed in the last N git commits.

    Inspects "git diff --name-only HEAD HEAD~N" and maps each changed file to the
    apps that depend on it (via apps_gf_packages_map); a change in one of the
    system packages marks every app as changed.

    Returns {"all": {...}, "go": {...}, "web": {...}}, each mapping
    app_name -> list of changed file paths (or ["all"] when p_mark_all_bool is set).
    """
    assert isinstance(p_apps_changes_deps_map, dict)
    assert "apps_gf_packages_map" in p_apps_changes_deps_map.keys()
    assert "system_packages_lst" in p_apps_changes_deps_map.keys()
    assert isinstance(p_commits_lookback_int, int)

    apps_gf_packages_map = p_apps_changes_deps_map['apps_gf_packages_map']
    system_packages_lst  = p_apps_changes_deps_map['system_packages_lst']
    assert isinstance(system_packages_lst, list)

    changed_apps_files_map = {
        # IMPORTANT!! - these are all apps that have either "go" or "web" changed. this is
        #               needed because when building in CI even if only Go files changed we
        #               need Web code built as well so that the final container can be built
        #               in its full form.
        "all": {},
        "go":  {},
        "web": {},
    }

    #------------------------
    # DEBUGGING - mark all apps as changed
    if p_mark_all_bool:
        for a, _ in apps_gf_packages_map.items():
            changed_apps_files_map["all"][a] = ["all"]
            changed_apps_files_map["go"][a]  = ["all"]
            changed_apps_files_map["web"][a] = ["all"]
        return changed_apps_files_map

    #------------------------
    # latest_commit_hash_str, _ = gf_core_cli.run('git rev-parse HEAD')
    # assert len(latest_commit_hash_str) == 32

    #------------------------
    # FIX!! - dont just look 1 commit back to see what changed. if locally a developer makes several
    #         commits and then uploads code to github (or other) and CI clones it, this function might
    #         miss some of the services/apps/packages that changed several commits back.
    #         instead some mechanism for getting the number of commits that some deployment environment
    #         is behind HEAD, and then use that number for "p_commits_lookback_int" (>1).
    past_commit_str = "HEAD~%s"%(p_commits_lookback_int)

    #------------------------
    list_str, _, _ = gf_core_cli.run("git diff --name-only HEAD %s"%(past_commit_str),
        p_print_output_bool=False)

    #--------------------------------------------------
    def _mark_app_changed(p_app_str, p_file_path_str, p_type_str):
        # record p_file_path_str as a change of app p_app_str, in both the
        # type-specific ("go"/"web") map and the aggregate "all" map.
        # FIX - the original appended to changed_apps_files_map["all"][app] without
        #       creating the list first, raising KeyError for a first-seen app.
        changed_apps_files_map["all"].setdefault(p_app_str, []).append(p_file_path_str)
        changed_apps_files_map[p_type_str].setdefault(p_app_str, []).append(p_file_path_str)

    #--------------------------------------------------
    # IMPORTANT!! - the file that changed affects all apps, so they all need to be marked as changed
    #               and this file added to the list of changed files of all apps.
    def add_change_to_all_apps(p_file_changed_str, p_type_str):
        assert p_type_str == "go" or p_type_str == "web"
        for a, _ in apps_gf_packages_map.items():
            _mark_app_changed(a, p_file_changed_str, p_type_str)

    #--------------------------------------------------
    # IMPORTANT!! - update only the apps that this file's package is marked as a dependency of
    def update_dependant_apps_file_lists(p_package_name_str, p_file_path_str, p_type_str):
        assert p_type_str == "go" or p_type_str == "web"

        # build out a list of apps that this package (p_package_name_str) is a dependency of
        dependant_apps_lst = []
        for app_str, app_gf_package_lst in apps_gf_packages_map.items():
            if p_package_name_str in app_gf_package_lst:
                dependant_apps_lst.append(app_str)

        # for all apps that are determined to have changed (because they depend on
        # p_package_name_str) add this file to those apps' lists of changed files.
        # FIX - the original referenced the undefined name "p_file_changed_str"
        #       (NameError) and tested membership against the top-level map
        #       ("all"/"go"/"web" keys) instead of the per-type app map.
        for app_str in dependant_apps_lst:
            _mark_app_changed(app_str, p_file_path_str, p_type_str)

    #--------------------------------------------------
    for l in list_str.split('\n'):

        #------------------------
        # GO
        if l.startswith('go'):

            # an app changed
            if l.startswith('go/gf_apps'):
                parts_lst = l.split('/')
                if len(parts_lst) > 2: # guard against short paths (e.g. "go/gf_apps")
                    package_name_str = parts_lst[2] # third element in the file path is a package name
                    update_dependant_apps_file_lists(package_name_str, l, "go")

            # one of the system packages has changed
            else:
                for sys_package_str in system_packages_lst:
                    # IMPORTANT!! - one of the system packages has changed, so infer
                    #               that all apps have changed.
                    if l.startswith('go/%s'%(sys_package_str)):
                        add_change_to_all_apps(l, "go")

        #------------------------
        # WEB
        elif l.startswith('web'):
            if l.startswith('web/src/gf_apps'):
                parts_lst = l.split('/')
                if len(parts_lst) > 3:
                    package_name_str = parts_lst[3] # get package_name from the path of the changed file
                    update_dependant_apps_file_lists(package_name_str, l, "web")

            # IMPORTANT!! - one of the web libs changed, so all apps should be rebuilt
            # FIX!! - have a better way of determining which apps use this lib,
            #         to avoid rebuilding unaffected apps
            elif l.startswith('web/libs'):
                add_change_to_all_apps(l, "web")

            else:
                parts_lst = l.split('/')
                if len(parts_lst) > 2: # guard against short paths (was an unguarded index)
                    package_name_str = parts_lst[2]
                    if package_name_str in system_packages_lst:
                        # FIX - was add_change_to_all_apps(l, web): "web" was an
                        #       undefined bare name, raising NameError at runtime.
                        add_change_to_all_apps(l, "web")

        #------------------------

    return changed_apps_files_map
def prepare_libs(p_name_str,
    p_cargo_crate_dir_path_str,
    p_type_str,
    p_exit_on_fail_bool=True):
    """Copy a built Rust crate's output libs into the shared rust/build dir.

    p_name_str - crate name; lib files are expected as lib{name}.a / lib{name}.so
                 (or lib{name}_py.so for CPython-extension crates ending in "_py").
    p_type_str - only "lib_rust" is supported (asserted).
    """
    assert os.path.isdir(p_cargo_crate_dir_path_str)
    assert p_type_str == "lib_rust"
    print(f"{fg('yellow')}PREPARE LIBS{attr(0)}>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")

    target_build_dir_path_str = os.path.abspath("%s/../../rust/build" % (modd_str))
    assert os.path.isdir(target_build_dir_path_str)

    # (source_lib_path, target_path) pairs to copy
    target_lib_file_path_lst = []

    if p_type_str == "lib_rust":
        release_dir_str = f"{p_cargo_crate_dir_path_str}/target/release"
        gf_core_cli.run(f"ls -al {release_dir_str}")

        #-------------
        # FIX!! - dont hardcode the app_name here like this, but parse Cargo.toml to detect if
        #         one of the Crate types is "staticlib".
        if p_name_str == "gf_images_jobs":

            #-------------
            # EXTERN_LIB - pull in the external (TensorFlow) libs this crate links against
            target_lib_dir_str = f"{modd_str}/../../rust/build" # f"{modd_str}/../../build/gf_apps/gf_images/tf_lib"
            prepare_libs__extern(target_lib_dir_str,
                p_tf_libs_bool=True,
                p_exit_on_fail_bool=p_exit_on_fail_bool)

            #-------------
            # RUST_PY - CPYTHON_EXTENSION - this lib is a Python extension written in Rust.
            # at the moment in GF the convention is for these Rust libs to have a postfix "_py".
            if p_cargo_crate_dir_path_str.endswith("_py"):
                # DYNAMIC_LIB
                # IMPORTANT!! - Rust compiles this dynamic lib with the "lib" prefix, but the Python VM
                #               requires extension libs to not have the "lib" prefix.
                source__py_lib_file_path_str = f"{release_dir_str}/lib{p_name_str}_py.so"
                target__py_lib_file_path_str = f"{target_build_dir_path_str}/{p_name_str}_py.so"
                assert os.path.isfile(source__py_lib_file_path_str)
                target_lib_file_path_lst.append((source__py_lib_file_path_str, target__py_lib_file_path_str))
            else:
                # STATIC_LIB
                source__static_lib_file_path_str = f"{release_dir_str}/lib{p_name_str}.a"
                assert os.path.isfile(source__static_lib_file_path_str)
                target_lib_file_path_lst.append(
                    (source__static_lib_file_path_str, target_build_dir_path_str))

                # DYNAMIC_LIB
                source__dynamic_lib_file_path_str = f"{release_dir_str}/lib{p_name_str}.so"
                assert os.path.isfile(source__dynamic_lib_file_path_str)
                target_lib_file_path_lst.append(
                    (source__dynamic_lib_file_path_str, target_build_dir_path_str))

        #-------------
        # ALL
        else:
            # FIX - was .append((path), target_dir): the parentheses around the path
            #       were grouping, not a tuple, so list.append() received two
            #       arguments and raised TypeError. Append a single (src, target) tuple.
            target_lib_file_path_lst.append(
                (f"{release_dir_str}/lib{p_name_str}.so", target_build_dir_path_str))

    #-------------
    # COPY_FILES
    for source_f, target_f in target_lib_file_path_lst:
        c_str = "cp %s %s" % (source_f, target_f)
        _, _, exit_code_int = gf_core_cli.run(c_str)

        # IMPORTANT!! - if the copy returns a non-zero exit code in some environments (CI) we
        #               want to fail with a non-zero exit code as well - this way other CI
        #               programs will flag builds as failed.
        if not exit_code_int == 0:
            if p_exit_on_fail_bool:
                exit(exit_code_int)
def minify_js(p_js_target_file_str, p_js_files_lst, p_log_fun):
    """Minify/concatenate the JS files in p_js_files_lst into p_js_target_file_str
    using the "uglifyjs" CLI tool."""
    p_log_fun("FUN_ENTER", "gf_web__build.minify_js()")

    cmd_lst = [
        "uglifyjs",
        f"--output {p_js_target_file_str}",
        " ".join(p_js_files_lst),
    ]
    gf_core_cli.run(" ".join(cmd_lst))

#---------------------------------------------------
# NOTE - a large commented-out legacy version of build_page() (Python-2-era,
#        using dict.has_key()) used to live here; it was dead code superseded by
#        the current build_page() and has been removed.
def run(p_full_image_name_str,
    p_log_fun,
    p_container_name_str=None,
    p_ports_map=None,
    p_volumes_map=None,
    p_hostname_str=None,
    p_host_network_bool=False,
    p_detached_bool=True,
    p_exit_on_fail_bool=False,
    p_docker_sudo_bool=True):
    """Start a container with "docker run --restart=always" and return its container ID.

    p_ports_map         - host_port -> container_port, each published with "-p".
    p_volumes_map       - host_dir -> container_dir, each mounted with "-v".
    p_detached_bool     - run with "-d"; the ID-return at the end relies on docker
                          printing the new container's ID on stdout in this mode.
    p_exit_on_fail_bool - exit the process with docker's exit code on failure (CI).
    """
    assert isinstance(p_full_image_name_str, str)

    print("")
    print("RUNNING DOCKER CONTAINER - %s" % (p_full_image_name_str))

    cmd_lst = []
    if p_docker_sudo_bool:
        cmd_lst.append("sudo")
    cmd_lst.extend([
        "docker run",
        "--restart=always",
    ])

    # CONTAINER_NAME
    if not p_container_name_str == None:
        cmd_lst.append("--name %s" % (p_container_name_str))

    # PORTS
    if not p_ports_map == None:
        for host_port_str, container_port_str in p_ports_map.items():
            # IMPORTANT!! - "-p" publishes a container's port or a range of ports to the host.
            cmd_lst.append("-p %s:%s" % (host_port_str, container_port_str))

    # VOLUMES
    if not p_volumes_map == None:
        for host_dir_str, container_dir_str in p_volumes_map.items():
            # IMPORTANT!! - "-v" - mount a host directory into a particular directory path in the
            #               container filesystem.
            cmd_lst.append("-v %s:%s" % (host_dir_str, container_dir_str))

    # HOSTNAME
    if not p_hostname_str == None:
        cmd_lst.append("-h %s" % (p_hostname_str))

    # HOST_NETWORK
    if p_host_network_bool:
        cmd_lst.append("--net=host")

    # DETACHED
    if p_detached_bool:
        cmd_lst.append("-d")

    # IMAGE_NAME - must come after all the flags
    cmd_lst.append(p_full_image_name_str)

    c_str = " ".join(cmd_lst)
    p_log_fun("INFO", " - %s" % (c_str))

    stdout_str, stderr_str, exit_code_int = gf_core_cli.run(c_str)

    if not stderr_str == "":
        print(stderr_str)

        # IMPORTANT!! - failure to reach the Docker daemon should always exit; it's not an expected failure.
        if "Cannot connect to the Docker daemon" in stderr_str:
            exit(1)

    # IMPORTANT!! - if the command returns a non-zero exit code, in some environments (CI) we
    #               want to fail with that non-zero exit code - this way CI will flag builds as failed.
    #               in other scenarios it's acceptable for this command to fail, and we want the caller
    #               to keep executing.
    if not exit_code_int == 0:
        if p_exit_on_fail_bool:
            exit(exit_code_int)

    # CONTAINER_ID - docker prints the new container's 64-char ID on stdout
    container_id_str = stdout_str.strip()
    assert len(container_id_str) == 64
    return container_id_str
def process_scripts():
    """Process every <script> tag in the page's soup: compile+minify .ts entry
    files, copy .js files into the build dir, and rewrite each tag's src
    attribute to the url_base-prefixed build path.

    NOTE(review): closure - reads soup, p_build_dir_str, p_log_fun,
    main_html_path_str and url_base_str from the enclosing build_page() scope;
    not callable standalone.
    """
    scripts_dom_nodes_lst = soup.findAll("script")

    # if there are scripts detected in the page
    if len(scripts_dom_nodes_lst) > 0:

        js_libs_build_dir_str = f"{p_build_dir_str}/js/lib"
        gf_core_cli.run(f"mkdir -p {js_libs_build_dir_str}") # create dir and all parent dirs

        for script_dom_node in scripts_dom_nodes_lst:

            # some <script> tags might just contain source code, and not reference an external JS file.
            # using .get("src") instead of ["src"] because the "src" DOM attribute might not be present.
            if script_dom_node.get("src") == None:
                continue
            src_str = script_dom_node["src"]

            # externally hosted script - nothing to build or copy
            if src_str.startswith("http://") or src_str.startswith("https://"):
                print("EXTERNAL_URL - DO NOTHING")
                continue

            # script paths are relative to the dir holding the main html file
            main_html_dir_path_str = os.path.dirname(main_html_path_str)
            assert os.path.isdir(main_html_dir_path_str)

            local_path_str = os.path.abspath(
                f"{main_html_dir_path_str}/{src_str}")
            print(local_path_str)
            assert os.path.isfile(local_path_str)

            #-----------------
            # TYPESCRIPT
            if local_path_str.endswith(".ts"):
                p_log_fun("INFO",
                    "%s------------ TYPESCRIPT --------------------------------%s" % (fg("yellow"), attr(0)))

                #---------------------------------------------------
                def build_typescript(p_out_file_str):
                    # compile the closure's main_ts_file_str with tsc into
                    # p_out_file_str, then minify the result in place.
                    cmd_lst = [
                        "tsc",
                        "--module system", # needed with the "--out" option
                        "--target es2017",
                        # "--target es6",

                        # Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports
                        # '--esModuleInterop',

                        f"--out {p_out_file_str}",
                        main_ts_file_str
                    ]
                    cmd_str = " ".join(cmd_lst)
                    print(cmd_str)

                    _, _, return_code_int = gf_core_cli.run(cmd_str)
                    if return_code_int > 0:
                        print("ERROR!! - TypeScript Compilation failed!")
                        exit(-1)

                    # minify into the same file name as the Typescript compiler output
                    target_dir_str = os.path.dirname(p_out_file_str) # NOTE(review): unused - confirm before removing
                    minify_js(p_out_file_str, [p_out_file_str], p_log_fun)

                #---------------------------------------------------
                main_ts_file_str = local_path_str

                # <name>.ts -> <name>.min.js
                minified_file_name_str = "%s.min.js" % (".".join(
                    os.path.basename(main_ts_file_str).split(".")[:-1]))
                minified_file_path_str = f"{p_build_dir_str}/js/{minified_file_name_str}"
                build_typescript(minified_file_path_str)

                # HTML_MODIFY - change the src in the html tag to the minified name, and url_base (dont leave relative path)
                script_dom_node[
                    "src"] = f"{url_base_str}/js/{minified_file_name_str}"

            #-----------------
            # JAVASCRIPT
            elif local_path_str.endswith(".js"):
                p_log_fun(
                    "INFO",
                    "%s------------ JAVASCRIPT --------------------------------%s" % (fg("yellow"), attr(0)))

                # IMPORTANT!! - just copy the JS file to the final build dir
                gf_core_cli.run(f"cp {local_path_str} {js_libs_build_dir_str}")

                # HTML_MODIFY - change the src in the html tag, to include the url_base (dont leave relative path)
                script_dom_node[
                    "src"] = f"{url_base_str}/js/lib/{os.path.basename(local_path_str)}"
def build_page(p_page_name_str,
    p_build_dir_str,
    p_build_copy_dir_str,
    p_page_info_map,
    p_log_fun):
    """Build a single web page into the build dir.

    Steps (all driven by keys optionally present in p_page_info_map):
    - parse the page's main html file ("main_html_path_str") with BeautifulSoup,
      compile/copy its <script> sources, compile/copy its css (incl. sass),
      rewrite their urls to be absolute under "url_base_str", and write the
      modified html as a template into the build dir;
    - copy subtemplates ("subtemplates_lst") and extra files ("files_to_copy_lst");
    - optionally copy the whole build dir to p_build_copy_dir_str.

    Args:
        p_page_name_str      - name of the page; also names the output html/css dirs.
        p_build_dir_str      - root output dir; created if missing.
        p_build_copy_dir_str - if not None, the build dir is copied here at the end.
        p_page_info_map      - per-page build config dict (keys listed above).
        p_log_fun            - logging callback taking (level_str, msg_str).

    NOTE(review): if "main_html_path_str" is absent but "url_base_str" handling
    or subtemplates reference it, the relevant steps are simply skipped - the
    nested closures are only defined/called inside the main_html branch.
    """
    # p_log_fun("FUN_ENTER", "gf_web__build.build_page()")

    print("")
    p_log_fun("INFO", "%s>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>%s" % (fg("orange_red_1"), attr(0)))
    p_log_fun("INFO", "             %sBUILD PAGE%s - %s%s%s" % (fg("cyan"), attr(0), fg("orange_red_1"), p_page_name_str, attr(0)))
    p_log_fun("INFO", "%s>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>%s" % (fg("orange_red_1"), attr(0)))
    print("")

    p_log_fun("INFO", f"build_dir_str - {p_build_dir_str}")
    assert isinstance(p_build_dir_str, str)

    # make build dir if it doesnt exist
    if not os.path.isdir(p_build_dir_str):
        gf_core_cli.run(f"mkdir -p {p_build_dir_str}")

    if "main_html_path_str" in p_page_info_map.keys():
        main_html_path_str = os.path.abspath(p_page_info_map["main_html_path_str"])
        assert os.path.isfile(main_html_path_str)
        assert main_html_path_str.endswith(".html")
        # the main html file (minus extension) must be named after the page
        assert ".".join(os.path.basename(main_html_path_str).split(".")[:-1]) == p_page_name_str

        p_log_fun("INFO", f"main_html_path_str - {main_html_path_str}")

    # URL_BASE - prefix used when rewriting script/css urls in the html
    if "url_base_str" in p_page_info_map.keys():
        url_base_str = p_page_info_map["url_base_str"]
        p_log_fun("INFO", f"url_base_str - {url_base_str}")

    if "main_html_path_str" in p_page_info_map.keys():

        f = open(main_html_path_str, "r")
        main_html_str = f.read()
        f.close()

        soup = BeautifulSoup(main_html_str)

        #---------------------------------------------------
        def process_scripts():
            # rewrite <script> tags: compile .ts entry-points, copy .js libs,
            # and point "src" attributes at the built artifacts under url_base_str.
            scripts_dom_nodes_lst = soup.findAll("script")

            # if there are scripts detected in the page
            if len(scripts_dom_nodes_lst) > 0:

                js_libs_build_dir_str = f"{p_build_dir_str}/js/lib"
                gf_core_cli.run(f"mkdir -p {js_libs_build_dir_str}") # create dir and all parent dirs

                for script_dom_node in scripts_dom_nodes_lst:

                    # some <script> tags might just contain source code, and not reference an external JS file.
                    # using .get("src") instead of ["src"] because the "src" DOM attribute might not be present.
                    if script_dom_node.get("src") == None:
                        continue

                    src_str = script_dom_node["src"]

                    # externally hosted scripts are not part of this build
                    if src_str.startswith("http://") or src_str.startswith("https://"):
                        print("EXTERNAL_URL - DO NOTHING")
                        continue

                    # script "src" paths are relative to the dir holding the main html file
                    main_html_dir_path_str = os.path.dirname(main_html_path_str)
                    assert os.path.isdir(main_html_dir_path_str)

                    local_path_str = os.path.abspath(f"{main_html_dir_path_str}/{src_str}")
                    print(local_path_str)
                    assert os.path.isfile(local_path_str)

                    #-----------------
                    # TYPESCRIPT
                    if local_path_str.endswith(".ts"):

                        p_log_fun("INFO", "%s------------ TYPESCRIPT --------------------------------%s" % (fg("yellow"), attr(0)))

                        #---------------------------------------------------
                        def build_typescript(p_out_file_str):
                            # compile the TS entry-point (main_ts_file_str from the
                            # enclosing scope) into one bundled file, then minify it.
                            cmd_lst = [
                                "tsc",
                                "--module system", # needed with the "--out" option
                                "--target es2017",
                                # "--target es6",

                                # Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports
                                # '--esModuleInterop',

                                f"--out {p_out_file_str}",
                                main_ts_file_str
                            ]
                            cmd_str = " ".join(cmd_lst)
                            print(cmd_str)

                            _, _, return_code_int = gf_core_cli.run(cmd_str)
                            # abort the whole build on a TS compile error
                            if return_code_int > 0:
                                print("ERROR!! - TypeScript Compilation failed!")
                                exit(-1)

                            # minify into the same file name as the Typescript compiler output
                            target_dir_str = os.path.dirname(p_out_file_str)
                            minify_js(p_out_file_str, [p_out_file_str], p_log_fun)

                        #---------------------------------------------------
                        main_ts_file_str = local_path_str

                        # "pages/main.ts" -> "main.min.js"
                        minified_file_name_str = "%s.min.js" % (".".join(os.path.basename(main_ts_file_str).split(".")[:-1]))
                        minified_file_path_str = f"{p_build_dir_str}/js/{minified_file_name_str}"

                        build_typescript(minified_file_path_str)

                        # HTML_MODIFY - change the src in the html tag to the minified name, and url_base (dont leave relative path)
                        script_dom_node["src"] = f"{url_base_str}/js/{minified_file_name_str}"

                    #-----------------
                    # JAVASCRIPT
                    elif local_path_str.endswith(".js"):

                        p_log_fun("INFO", "%s------------ JAVASCRIPT --------------------------------%s" % (fg("yellow"), attr(0)))

                        # IMPORTANT!! - just copy the JS file to the final build dir
                        gf_core_cli.run(f"cp {local_path_str} {js_libs_build_dir_str}")

                        # HTML_MODIFY - change the src in the html tag, to include the url_base (dont leave relative path)
                        script_dom_node["src"] = f"{url_base_str}/js/lib/{os.path.basename(local_path_str)}"

                    #-----------------

        #---------------------------------------------------
        def process_css():
            # rewrite <link type="text/css"> tags: compile .scss with sass,
            # copy plain .css, and point "href" attributes at the build output.
            p_log_fun("INFO", "%s------------ CSS ---------------------------------------%s" % (fg("yellow"), attr(0)))

            css_links_lst = soup.findAll("link", {"type": "text/css"})

            target_dir_str = f'{p_build_dir_str}/css/{p_page_name_str}'
            gf_core_cli.run(f'mkdir -p {target_dir_str}') #create dir and all parent dirs

            for css in css_links_lst:
                src_str = css["href"]
                assert src_str.endswith('.css') or src_str.endswith('.scss')

                # externally hosted stylesheets are not part of this build
                if src_str.startswith('http://') or src_str.startswith('https://'):
                    print('EXTERNAL_URL - DO NOTHING')
                    continue

                # full paths are relative to the dir holding the main html file (app entry point)
                full_path_str = os.path.abspath(f'{os.path.dirname(main_html_path_str)}/{src_str}')
                print(full_path_str)
                assert os.path.isfile(full_path_str)

                # SASS
                if src_str.endswith('.scss'):
                    css_file_name_str = os.path.basename(src_str).replace('.scss', '.css')
                    final_src_str = f'{target_dir_str}/{css_file_name_str}'
                    gf_core_cli.run(f'sass {full_path_str} {final_src_str}')

                    # HTML_MODIFY - change the src in the html tag, to include the url_base
                    # (dont leave relative path)
                    css["href"] = f'{url_base_str}/css/{p_page_name_str}/{css_file_name_str}'

                # CSS
                else:
                    gf_core_cli.run(f'cp {full_path_str} {target_dir_str}')

                    # HTML_MODIFY - change the src in the html tag, to include the url_base (dont leave relative path)
                    css["href"] = f'{url_base_str}/css/{p_page_name_str}/{os.path.basename(full_path_str)}'

        #---------------------------------------------------

        if "main_html_path_str" in p_page_info_map.keys():
            process_scripts()
            process_css()

        #-----------------
        # CREATE_FINAL_MODIFIED_HTML - create the html template file in the build dir that contains all
        #                              the modified urls for JS/CSS
        target_html_file_path_str = f'{p_build_dir_str}/templates/{p_page_name_str}/{p_page_name_str}.html'
        gf_core_cli.run(f'mkdir -p {os.path.dirname(target_html_file_path_str)}')

        f = open(target_html_file_path_str, 'w+')
        f.write(soup.prettify())
        f.close()

        #-----------------

    #-----------------
    # SUBTEMPLATES
    if "subtemplates_lst" in p_page_info_map.keys():
        process_subtemplates(p_page_name_str,
            p_build_dir_str,
            p_page_info_map,
            p_log_fun)

    #-----------------
    # IMPORTANT!! - do after build_copy_dir is created
    if "files_to_copy_lst" in p_page_info_map.keys():
        process_files_to_copy(p_page_info_map, p_log_fun)

    #-----------------
    # BUILD_COPY - this propety allows for the build dir of a page to be copied to some other dir after the build is complete.
    #              this has to run after all other build steps complete, so that it includes all the build artifacts.
    #
    # IMPORTANT!! - only some pages in some apps define this. gf_solo is one of these apps, it adds this property
    #               to the page defs of all other apps (since gf_solo includes all apps).
    if not p_build_copy_dir_str == None:
        print(f"copying {fg('green')}build{attr(0)} dir ({p_build_dir_str}) to {fg('yellow')}{p_build_copy_dir_str}{attr(0)}")
        gf_core_cli.run(f'mkdir -p {p_build_copy_dir_str}')
        gf_core_cli.run(f'cp -r {p_build_dir_str} {p_build_copy_dir_str}')

    #-----------------
    print("")
    p_log_fun("INFO", "%s>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>%s END" % (fg("orange_red_1"), attr(0)))
    print("")
def build_go(p_name_str,
    p_go_dir_path_str,
    p_go_output_path_str,
    p_static_bool=False,
    p_exit_on_fail_bool=True,
    p_go_get_bool=True):
    """Compile a Go main package into a binary.

    Args:
        p_name_str          - display name of the service (logging only).
        p_go_dir_path_str   - dir containing the Go main package; build runs with this as CWD.
        p_go_output_path_str- output binary path; its parent dir must already exist.
        p_static_bool       - build a statically linked binary (slower; safest for containers).
        p_exit_on_fail_bool - on a failed "go build", exit() with its exit code (so CI flags the build).
        p_go_get_bool       - run "go get -u" before building. defaults to True to preserve
                              previous behavior; mirrors the p_go_get_bool flag of run().
    """
    assert isinstance(p_static_bool, bool)
    assert isinstance(p_go_get_bool, bool)
    assert os.path.isdir(p_go_dir_path_str)

    print(p_go_output_path_str)
    assert os.path.isdir(os.path.dirname(p_go_output_path_str))

    print("=============================")
    if p_static_bool:
        print(" -- %sSTATIC BINARY BUILD%s" % (fg("yellow"), attr(0)))

    print(" -- build %s%s%s service" % (fg("green"), p_name_str, attr(0)))
    print(" -- go_dir_path - %s%s%s" % (fg("green"), p_go_dir_path_str, attr(0)))
    print(" -- go_output_path - %s%s%s" % (fg("green"), p_go_output_path_str, attr(0)))

    cwd_str = os.getcwd()
    os.chdir(p_go_dir_path_str) # change into the target main package dir

    # GO_GET - best-effort dependency update; its exit code is deliberately
    #          not checked (previously captured into exit_code_int but then
    #          immediately overwritten by the build's exit code).
    if p_go_get_bool:
        print("go get")
        gf_core_cli.run("go get -u")
        print("")
        print("")

    # STATIC_LINKING - when deploying to containers it is not always guaranteed that all
    #                  required libraries are present. so its safest to compile to a statically
    #                  linked lib.
    #                  build time a few times larger then regular, so slow for dev.
    #                  "-ldflags '-s'" - omit the symbol table and debug information.
    #                  "-a"            - forces all packages to be rebuilt
    if p_static_bool:

        # https://golang.org/cmd/link/
        # IMPORTANT!! - "CGO_ENABLED=0" and "-installsuffix cgo" no longer necessary since golang 1.10.
        #               "CGO_ENABLED=0" we also dont want to disable since Rust libs are used in Go via CGO.
        # "-extldflags flags" - Set space-separated flags to pass to the external linker
        args_lst = [
            "CGO_ENABLED=0",
            "GOOS=linux",
            "go build",
            "-a",
            # "-installsuffix cgo",

            # LINKER_FLAGS
            # "-ldl" - "-l" provides lib path. links in /usr/lib/libdl.so/.a
            #          this is needed to prevent Rust .a lib errors relating
            #          to undefined references to "dlsym","dladdr"
            ('''-ldflags '-s -extldflags "-static -ldl"' ''').strip(),

            "-o %s" % (p_go_output_path_str),
        ]
        c_str = " ".join(args_lst)

    # DYNAMIC_LINKING - fast build for dev.
    else:
        c_str = "go build -o %s" % (p_go_output_path_str)

    print(c_str)
    _, _, exit_code_int = gf_core_cli.run(c_str)

    # IMPORTANT!! - if "go build" returns a non-zero exit code in some environments (CI) we
    #               want to fail with a non-zero exit code as well - this way other CI
    #               programs will flag builds as failed.
    if not exit_code_int == 0:
        if p_exit_on_fail_bool:
            exit(exit_code_int)

    os.chdir(cwd_str) # return to initial dir
def run(p_name_str,
    p_go_dir_path_str,
    p_go_output_path_str,
    p_static_bool=False,
    p_exit_on_fail_bool=True,
    p_dynamic_libs_dir_path_str=os.path.abspath("%s/../../rust/build" % (modd_str)),
    p_go_get_bool=True):
    """Build a Go main package, linking against the Rust-built dynamic/static libs.

    Args:
        p_name_str                  - display name of the service (logging only).
        p_go_dir_path_str           - dir containing the Go main package; build runs with this as CWD.
        p_go_output_path_str        - output binary path; its parent dir must already exist.
        p_static_bool               - build a statically linked binary (slower; safest for containers).
        p_exit_on_fail_bool         - on a failed "go build", exit() with its exit code (so CI flags the build).
        p_dynamic_libs_dir_path_str - dir holding the Rust build artifacts placed on LD_LIBRARY_PATH.
                                      FIX: previously this argument was accepted but silently ignored
                                      (the path was recomputed from modd_str); it is now honored.
                                      the default is unchanged, so default-call behavior is identical.
        p_go_get_bool               - run "go get -u" before building.
    """
    assert isinstance(p_static_bool, bool)

    print("")
    if p_static_bool:
        print(" -- %sSTATIC BINARY BUILD%s" % (fg("yellow"), attr(0)))

    print(" -- build %s%s%s service" % (fg("green"), p_name_str, attr(0)))
    print(" -- go_dir_path - %s%s%s" % (fg("green"), p_go_dir_path_str, attr(0)))
    print(" -- go_output_path - %s%s%s" % (fg("green"), p_go_output_path_str, attr(0)))

    assert os.path.isdir(p_go_dir_path_str)
    assert os.path.isdir(os.path.dirname(p_go_output_path_str))

    print("--------------------")
    print(f"Go {fg('yellow')}cache dir{attr(0)}:")
    gf_core_cli.run(f"go env GOCACHE")
    print("--------------------")

    cwd_str = os.getcwd()
    os.chdir(p_go_dir_path_str) # change into the target main package dir

    #--------------------------------------------------
    def get_libs_for_linking():
        # RUST_DYNAMIC_LIBS - build the LD_LIBRARY_PATH env prefix pointing at the
        # Rust build artifacts. honors p_dynamic_libs_dir_path_str (the tf_lib
        # subdir is assumed to live under it - TODO confirm for non-default dirs).
        dynamic_libs_dir_path_str    = os.path.abspath(p_dynamic_libs_dir_path_str)
        tf_dynamic_libs_dir_path_str = os.path.abspath(f"{p_dynamic_libs_dir_path_str}/tf_lib/lib")

        print(f"dynamic libs dir - {fg('green')}{dynamic_libs_dir_path_str}{attr(0)}")
        gf_core_cli.run(f"ls -al {dynamic_libs_dir_path_str}")

        LD_paths_lst = [
            dynamic_libs_dir_path_str,
            tf_dynamic_libs_dir_path_str
        ]
        LD_paths_str = f"LD_LIBRARY_PATH={':'.join(LD_paths_lst)}"
        return LD_paths_str

    #--------------------------------------------------
    LD_paths_str = get_libs_for_linking()

    # GO_GET - best-effort dependency update; its exit code is deliberately not
    #          checked (previously captured but immediately overwritten below).
    if p_go_get_bool:
        gf_core_cli.run(f"{LD_paths_str} go get -u")
        print("")
        print("")

    #-----------------------------
    # STATIC_LINKING - when deploying to containers it is not always guaranteed that all
    #                  required libraries are present. so its safest to compile to a statically
    #                  linked lib.
    #                  build time a few times larger then regular, so slow for dev.
    #                  "-ldflags '-s'" - omit the symbol table and debug information.
    if p_static_bool:
        print(f"{fg('yellow')}STATIC LINKING{attr(0)} --")

        # https://golang.org/cmd/link/
        # IMPORTANT!! - "CGO_ENABLED=0" and "-installsuffix cgo" no longer necessary since golang 1.10.
        #               "CGO_ENABLED=0" we also dont want to disable since Rust libs are used in Go via CGO.

        # IMPORTANT!! - debug .a files:
        #               "ar -t libgf_images_jobs.a" - get a list of Archived object files in static .a libs.
        #                                             static library is an archive (ar) of object files.
        #                                             The object files are usually in the ELF format
        gf_core_cli.run(f"ldconfig -v")
        # gf_core_cli.run(f"cp {dynamic_libs_dir_path_str}/libgf_images_jobs.a /usr/lib")

        args_lst = [
            LD_paths_str,
            # f"LD_LIBRARY_PATH={dynamic_libs_dir_path_str}",
            # f"LD_LIBRARY_PATH=/usr/lib",

            # "CGO_ENABLED=0",
            "GOOS=linux",
            "go build",

            # force rebuilding of packages that are already up-to-date.
            "-a",
            # "-installsuffix cgo",

            # LINKER_FLAGS
            # "-ldflags"    - arguments to pass on each go tool link invocation
            # "-s"          - Omit the symbol table and debug information
            # "-extldflags" - Set space-separated flags to pass to the external linker.
            #                 on Alpine builds the GCC toolchain linker "ld" is used.
            # "-static"     - On systems that support dynamic linking, this
            #                 overrides -pie and prevents linking with the shared libraries.
            # "-ldl"        - "-l" provides lib path. links in /usr/lib/libdl.so/.a
            #                 this is needed to prevent Rust .a lib errors relating
            #                 to undefined references to "dlsym","dladdr"
            #
            # (f'''-ldflags '-s -extldflags "-t -static -lgf_images_jobs -ldl -lglib"' ''').strip(),
            # (f'''-ldflags '-s -extldflags "-lm"' ''').strip(),
            ('''-ldflags '-s -extldflags "-static -ldl"' ''').strip(),

            "-o %s" % (p_go_output_path_str),
        ]
        c_str = " ".join(args_lst)

    #-----------------------------
    # DYNAMIC_LINKING - fast build for dev.
    else:
        print(f"{fg('yellow')}DYNAMIC LINKING{attr(0)} --")
        c_str = f"{LD_paths_str} go build -o {p_go_output_path_str}"

    #-----------------------------
    print(c_str)
    _, _, exit_code_int = gf_core_cli.run(c_str)

    # IMPORTANT!! - if "go build" returns a non-zero exit code in some environments (CI) we
    #               want to fail with a non-zero exit code as well - this way other CI
    #               programs will flag builds as failed.
    if not exit_code_int == 0:
        if p_exit_on_fail_bool:
            exit(exit_code_int)

    os.chdir(cwd_str) # return to initial dir

    print("build done...")