def reset_binary_mapping():
    """Release mapping entries owned by the current process: drop sections
    whose build produced no binaries and reset the ProcessID of the rest."""
    global BUILD_MAPPING_FILE
    global currentProcess
    with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
        fp = open(BUILD_MAPPING_FILE)
        dic = yaml.load(fp)
        fp.close()
        try:
            for section in dic.keys():
                if dic[section]['ProcessID'] in currentProcess:
                    if not dic[section]['binaries']:
                        # no binaries recorded: the previous build failed,
                        # so drop the stale entry altogether
                        del dic[section]
                    else:
                        # mark the section as no longer being built
                        dic[section]['ProcessID'] = 0
        except Exception as e:
            logging.debug(e)
        fp = open(BUILD_MAPPING_FILE, 'w')
        fp.write(yaml.dump(dic, default_flow_style=False))
        fp.close()
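# Hedged illustration (not taken from the repository): the functions in this
# file appear to keep one <section>.yaml per tool under BUILD_MAPPING_DIR,
# mapping the section name to a 'ProcessID' (pid of the process currently
# building it, 0 when idle) and a 'binaries' list of generated file paths.
# The helper below is hypothetical and only documents that assumed layout.
def _example_mapping_entry(section, pid=0):
    # e.g. {'dbench': {'ProcessID': 0, 'binaries': []}}
    # ('dbench' is just an illustrative section name)
    return {section: {'ProcessID': pid, 'binaries': []}}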
def copy_build_caliper(target_arch, flag=0):
    """Copy the binaries of tools that were built by other caliper processes.

    target_arch: the architecture to build caliper for
    flag: whether the build is for the target or the local machine (the server)
        0: for the target
        1: for the server
    """
    global GEN_DIR, WS_GEN_DIR, BUILD_MAPPING_FILE
    prev_build_files = []
    current_build_files = []
    WS_prev_build_files = []
    WS_current_build_files = []
    if target_arch:
        arch = target_arch
    else:
        arch = 'x86_64'
    # get the files list of 'cfg'
    build_folder = os.path.join(caliper_path.BUILD_LOGS, arch)
    files_list = server_utils.get_cases_def_files(arch)
    source_build_file = caliper_path.SOURCE_BUILD_FILE
    des_build_file = os.path.join(TMP_DIR, caliper_path.BUILD_FILE)
    logging.info("=" * 55)
    logging.info("Please wait while checking and copying the files of the"
                 " tools that were built by other processes")
    # Fetch details of previous builds
    for i in range(0, len(files_list)):
        # get the directory, such as 'common', 'server' and so on
        dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
        config = ConfigParser.ConfigParser()
        config.read(files_list[i])
        sections = config.sections()
        for i in range(0, len(sections)):
            BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR,
                                              sections[i] + '.yaml')
            # populating the binary mapping yaml
            try:
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    fp.close()
                try:
                    if dic[sections[i]]['ProcessID'] not in currentProcess:
                        logging.info("=" * 55)
                        logging.info("Please wait, another process is"
                                     " building %s" % sections[i])
                        temp = yaml.load(open(caliper_path.BUILD_TIME))
                        count = temp[sections[i]]
                        # wait with a countdown until the other process
                        # finishes or the per-tool time budget runs out
                        while dic[sections[i]]['ProcessID'] not in currentProcess and count:
                            mins, secs = divmod(count, 60)
                            timeformat = '{:02d}:{:02d}'.format(mins, secs)
                            count = count - 1
                            sys.stdout.write("\r" + timeformat)
                            sys.stdout.flush()
                            time.sleep(1)
                            with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                                fp = open(BUILD_MAPPING_FILE)
                                dic = yaml.load(fp)
                                fp.close()
                        if count == 0:
                            # the other process timed out: drop its entry
                            with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                                fp = open(BUILD_MAPPING_FILE)
                                dic = yaml.load(fp)
                                if type(dic) != dict:
                                    dic = {}
                                fp.close()
                                if sections[i] in dic.keys():
                                    del dic[sections[i]]
                                fp = open(BUILD_MAPPING_FILE, 'w')
                                fp.write(yaml.dump(dic,
                                                   default_flow_style=False))
                                fp.close()
                    if not dic[sections[i]]['binaries']:
                        logging.info("%s BUILDING FAILED" % sections[i])
                except KeyError:
                    continue
            except Exception as e:
                logging.debug(e)
                sys.exit(1)
            temp = 0
            WS_current_build_files = getAllFilesRecursive(WS_GEN_DIR)
            # copy the binaries that are not yet in the workspace binary dir
            for j in dic[sections[i]]['binaries']:
                if j not in WS_current_build_files:
                    if j != BUILD_MAPPING_FILE:
                        WS_Dir = os.path.join(WS_GEN_DIR,
                                              '/'.join(j.split('/')[5:-1]))
                        try:
                            os.makedirs(WS_Dir)
                        except:
                            pass
                        if temp == 0:
                            temp = 1
                            logging.info("=" * 55)
                            logging.info("COPYING %s's Binaries" % sections[i])
                        shutil.copy(j, WS_Dir)
            for log in glob.glob(os.path.join(build_folder, sections[i] + '*')):
                shutil.copy(log, FOLDER.build_dir)
    logging.info("=" * 55)
    return 0
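# getAllFilesRecursive() is used above and below but defined elsewhere in the
# project; the fallback below is only a hedged sketch of the assumed behaviour
# (recursively collect the full paths of all files under a directory) so that
# this excerpt reads standalone. It is skipped when the real helper is in scope.
if 'getAllFilesRecursive' not in globals():
    def getAllFilesRecursive(root):
        # walk `root` and return the path of every regular file found
        found = []
        for base, dirs, files in os.walk(root):
            for name in files:
                found.append(os.path.join(base, name))
        return found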
def build_caliper(target_arch, flag=0, clear=0):
    """Build the caliper tools for the given architecture.

    target_arch: the architecture to build caliper for
    flag: whether the build is for the target or the local machine (the server)
        0: for the target
        1: for the server
    clear: when set, delete the binaries that were already built (-c option)
    """
    copy = 0
    global GEN_DIR, WS_GEN_DIR, BUILD_MAPPING_FILE, BUILD_MAPPING_DIR
    GEN_DIR = caliper_path.GEN_DIR
    WS_GEN_DIR = os.path.join(FOLDER.workspace, 'binary')
    prev_build_files = []
    current_build_files = []
    WS_prev_build_files = []
    WS_current_build_files = []
    if target_arch:
        arch = target_arch
    else:
        arch = 'x86_64'
    # get the files list of 'cfg'
    files_list = server_utils.get_cases_def_files(arch)
    logging.debug("config files are %s" % files_list)
    BUILD_MAPPING_DIR = os.path.join(BUILD_MAPPING_DIR, arch)
    if not os.path.exists(BUILD_MAPPING_DIR):
        try:
            os.makedirs(BUILD_MAPPING_DIR)
        except:
            pass
    source_build_file = caliper_path.SOURCE_BUILD_FILE
    des_build_file = os.path.join(TMP_DIR, caliper_path.BUILD_FILE)
    logging.info("destination file of building is %s" % des_build_file)
    set_signals()
    # if -c is used, check for binaries that were already built and delete them
    if clear:
        logging.info("=" * 55)
        logging.info("WARNING: Please wait, don't run any other instance"
                     " of caliper")
        for i in range(0, len(files_list)):
            # get the directory, such as 'common', 'server' and so on
            dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
            config = ConfigParser.ConfigParser()
            config.read(files_list[i])
            sections = config.sections()
            for i in range(0, len(sections)):
                BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR,
                                                  sections[i] + '.yaml')
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    fp.close()
                    if type(dic) != dict:
                        dic = {}
                    if sections[i] in dic.keys():
                        for file in dic[sections[i]]['binaries']:
                            try:
                                shutil.rmtree(file)
                            except:
                                pass
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = 0
                    fp = open(BUILD_MAPPING_FILE, 'w')
                    fp.write(yaml.dump(dic, default_flow_style=False))
                    fp.close()
        logging.info("It is safe to run caliper now")
        logging.info("=" * 55)

    # STARTING THE BUILD
    for i in range(0, len(files_list)):
        # get the directory, such as 'common', 'server' and so on
        dir_name = files_list[i].strip().split("/")[-1].split("_")[0]
        config = ConfigParser.ConfigParser()
        config.read(files_list[i])
        sections = config.sections()
        for i in range(0, len(sections)):
            BUILD = 0
            BUILD_MAPPING_FILE = os.path.join(BUILD_MAPPING_DIR,
                                              sections[i] + '.yaml')
            reset_binary_mapping()
            try:
                # lock the mapping file and claim the tool if this is the
                # first process that is building it
                with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                    fp = open(BUILD_MAPPING_FILE)
                    dic = yaml.load(fp)
                    if type(dic) != dict:
                        dic = {}
                    fp.close()
                    if sections[i] not in dic.keys():
                        dic[sections[i]] = {}
                        dic[sections[i]]['binaries'] = []
                        dic[sections[i]]['ProcessID'] = os.getpid()
                        BUILD = 1
                        fp = open(BUILD_MAPPING_FILE, 'w')
                        fp.write(yaml.dump(dic, default_flow_style=False))
                        fp.close()
                    # an empty binaries field means the previous build failed
                    if not dic[sections[i]]['binaries']:
                        BUILD = 1
                    # check whether the tool is already built or is being
                    # built by another process
                    if dic[sections[i]]['ProcessID'] not in currentProcess:
                        # continue with the next tools; these binaries will
                        # be copied later
                        logging.info("=" * 55)
                        logging.info("%s is being built by some other process,"
                                     " we'll build the remaining tools"
                                     % sections[i])
                        continue
            except Exception as e:
                logging.debug(e)
                sys.exit(1)
            if BUILD == 0:
                # collect the build files in the workspace binary dir and
                # the main binary dir
                WS_prev_build_files = getAllFilesRecursive(WS_GEN_DIR)
                prev_build_files = getAllFilesRecursive(GEN_DIR)
                # check whether the required binaries are present in the
                # main binary dir
                for j in dic[sections[i]]['binaries']:
                    if j not in prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            # the binaries are not present, we have to build them
                            BUILD = 1
            if BUILD == 1:
                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
                shutil.copyfile(os.path.abspath(source_build_file),
                                des_build_file)
                try:
                    result = generate_build(config, sections[i], dir_name,
                                            des_build_file)
                except Exception as e:
                    logging.info(e)
                else:
                    if result:
                        return result
                result = build_each_tool(dir_name, sections[i],
                                         des_build_file, target_arch)
                if result:
                    build_flag = server_utils.get_fault_tolerance_config(
                        "fault_tolerance", "build_error_continue")
                    # the build has failed, so clear the section entry in
                    # the build mapping yaml
                    with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                        dic[sections[i]]['ProcessID'] = 0
                        dic[sections[i]]['binaries'] = []
                        fp = open(BUILD_MAPPING_FILE, 'w')
                        fp.write(yaml.dump(dic, default_flow_style=False))
                        fp.close()
                    if build_flag == 1:
                        continue
                    else:
                        return result
                if os.path.exists(des_build_file):
                    os.remove(des_build_file)
            else:
                # copy the generated binaries to the workspace binaries
                for j in dic[sections[i]]['binaries']:
                    if j not in WS_prev_build_files:
                        if j != BUILD_MAPPING_FILE:
                            WS_Dir = os.path.join(WS_GEN_DIR,
                                                  '/'.join(j.split('/')[5:-1]))
                            try:
                                os.makedirs(WS_Dir)
                            except:
                                pass
                            shutil.copy(j, WS_Dir)
                logging.info("=" * 55)
                logging.info("%s is already built", sections[i])
            # get the binaries present in the workspace binary dir;
            # WS_current_build_files - WS_prev_build_files = toolchain-related
            # binaries, which are copied to the main binary folder
            WS_current_build_files = getAllFilesRecursive(WS_GEN_DIR)
            for files in WS_current_build_files:
                if files not in WS_prev_build_files:
                    deflocation = os.path.join(str(GEN_DIR),
                                               '/'.join(files.split('/')[6:]))
                    try:
                        os.makedirs('/'.join(deflocation.split('/')[:-1]))
                    except:
                        pass
                    if not os.path.exists(deflocation):
                        shutil.copy(files, deflocation)
                    (dic[sections[i]]['binaries']).append(str(deflocation))
            dic[sections[i]]['ProcessID'] = 0
            with client_utils.SimpleFlock(BUILD_MAPPING_FILE, 60):
                fp = open(BUILD_MAPPING_FILE)
                temp = yaml.load(fp)
                if type(temp) != dict:
                    temp = {}
                fp.close()
                copy_dic(temp, dic, sections[i])
                fp = open(BUILD_MAPPING_FILE, 'w')
                fp.write(yaml.dump(dic, default_flow_style=False))
                fp.close()
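# copy_dic() is defined elsewhere in the project. The fallback below is only a
# hedged sketch of the behaviour its call site above appears to rely on:
# before `dic` is written back, sections that other processes may have added
# or updated in the on-disk file (`temp`) are merged into `dic`, while the
# entry for the section just built is kept from `dic`. Treat this as an
# assumption about the helper, not its actual implementation; it is skipped
# when the real helper is in scope.
if 'copy_dic' not in globals():
    def copy_dic(temp, dic, section):
        # pull every other section's entry from the freshly read file so that
        # concurrent updates are not overwritten; keep our own `section` entry
        for key, value in temp.items():
            if key != section:
                dic[key] = value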