def pool_synchrosize():
    """Block until the worker pool has drained its queue and every worker is idle.

    If a worker flagged an error, shut the pool down and report the failing
    command through debug.error (expected to abort the build).
    """
    global error_occured
    global error_execution
    if processor_availlable <= 1:
        #in this case : nothing to synchronise
        return
    debug.verbose("wait queue process ended\n")
    # Wait for queue to empty (a worker error breaks the wait early)
    while not work_queue.empty() \
          and error_occured == False:
        time.sleep(0.2)
        pass
    # Wait all thread have ended their current process
    while current_thread_working != 0 \
          and error_occured == False:
        time.sleep(0.2)
        pass
    if error_occured == False:
        debug.verbose("queue is empty")
    else:
        un_init()
        debug.debug("Thread return with error ... ==> stop all the pool")
        if error_execution["id"] == -1:
            debug.error("Pool error occured ... (No return information on Pool)")
            return
        debug.error("Error in an pool element : [" + str(error_execution["id"]) + "]", crash=False)
        debug.debug(env.print_pretty(error_execution["cmd"]), force=True)
        debug.print_compilator(str(error_execution["out"][0]))
        debug.print_compilator(str(error_execution["err"][0]))
        if error_execution["return"] == 2:
            debug.error("can not compile file ... [keyboard interrrupt]")
        else:
            debug.error("can not compile file ... return value : " + str(error_execution["return"]))
def make_package_binary_lib(self, path_package, pkg_name, base_pkg_path, heritage_list, static):
    """Copy the shared library of every heritage module into the package lib folder.

    :param path_package: root path of the package being assembled
    :param pkg_name: package name (used for logging only)
    :param base_pkg_path: unused here; kept for interface symmetry with siblings
    :param heritage_list: dependency heritage providing the module names to copy
    :param static: when True, nothing is copied (static link embeds the code)
    :return: True when at least one file was copied or removed, False otherwise
    """
    debug.debug("make_package_binary_lib [START]")
    copy_list={}
    path_package_lib = os.path.join(path_package, self.pkg_path_lib)
    if static == False:
        #copy all shred libs...
        tools.create_directory_of_file(path_package_lib)
        debug.verbose("libs for " + str(pkg_name) + ":")
        for heritage in heritage_list.list_heritage:
            debug.debug("sub elements: " + str(heritage.name))
            file_src = self.get_build_file_dynamic(heritage.name)
            debug.verbose(" has directory: " + file_src)
            if os.path.isfile(file_src):
                debug.debug(" need copy: " + file_src + " to " + path_package_lib)
                #copy all data:
                # TODO : We can have a problem when writing over library files ...
                tools.copy_file(file_src,
                                os.path.join(path_package_lib, os.path.basename(file_src)),
                                in_list=copy_list)
    #real copy files
    ret_copy = tools.copy_list(copy_list)
    ret_remove = False
    if self.pkg_path_lib != "":
        # remove unneded files (NOT folder ...)
        ret_remove = tools.clean_directory(path_package_lib, copy_list)
    debug.debug("make_package_binary_lib [STOP]")
    return ret_copy or ret_remove
def configure_module(self, target, module):
    """Apply every recorded export of this system (flags, depends, sources,
    paths, state actions, headers, version) onto the given module and target.

    :param target: build target receiving the state actions
    :param module: module receiving the exported configuration
    """
    # add element flags to export
    for elem in self._export_flags:
        debug.verbose("add element :" + str(elem) + " elems=" + str(self._export_flags[elem]))
        module.add_flag(elem, self._export_flags[elem], export=True)
    # add module dependency
    if self._export_depends != []:
        module.add_depend(self._export_depends)
    # add exporting sources
    if self._export_src != []:
        module.add_src_file(self._export_src)
    # add export path
    if self._export_path != []:
        # no control on API
        module._add_path(self._export_path, export=True)
    # Export all actions ...
    for elem in self._action_on_state:
        level, name, action = self._action_on_state[elem]
        target.add_action(elem, level, name, action)
    # register exported headers
    for elem in self._headers:
        module.add_header_file(
            elem["list"],
            destination_path=elem["dst"],
            clip_path=elem["clip"],
            recursive=elem["recursive"])
    # propagate the package version when one was declared
    # (identity check with None is the idiomatic form; was "!= None")
    if self._version is not None:
        module.set_pkg("VERSION", self._version)
async def streaming(_response):
    """Receive a streamed upload, store it under its sha512 name, answer with metadata.

    The request body is read chunk by chunk from ``_request.stream`` into a
    temporary file while being hashed. If a file with the same sha512 already
    exists the temporary file is discarded; otherwise it is moved into the
    media store and a ``.meta`` sidecar (size, names, media-info) is written.

    :param _response: streaming HTTP response the JSON answer is written to
    """
    debug.info("streaming " + str(_response))
    total_size = 0
    temporary_file = os.path.join(_app.config['REST_TMP_DATA'], str(tmp_value) + ".tmp")
    if not os.path.exists(_app.config['REST_TMP_DATA']):
        os.makedirs(_app.config['REST_TMP_DATA'])
    if not os.path.exists(_app.config['REST_MEDIA_DATA']):
        os.makedirs(_app.config['REST_MEDIA_DATA'])
    # stream the body into a temporary file while hashing it;
    # "with" guarantees the handle is closed even if a read fails
    sha512 = hashlib.sha512()
    with open(temporary_file, "wb") as file_stream:
        while True:
            body = await _request.stream.read()
            if body is None:
                debug.warning("empty body")
                break
            total_size += len(body)
            debug.verbose("body " + str(len(body)) + "/" + str(total_size))
            file_stream.write(body)
            sha512.update(body)
    print("SHA512: " + str(sha512.hexdigest()))
    destination_filename = os.path.join(_app.config['REST_MEDIA_DATA'], str(sha512.hexdigest()))
    if os.path.isfile(destination_filename) == True:
        # content already stored: drop the temporary file (was leaked before)
        os.remove(temporary_file)
        answer_data = {
            "size": total_size,
            "sha512": str(sha512.hexdigest()),
            'filename': _request.headers["filename"],
            'mime-type': _request.headers["mime-type"],
            "already_exist": True,
        }
        await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
        return
    # move the file
    shutil.move(temporary_file, destination_filename)
    # collect media info ...
    media_info = MediaInfo.parse(destination_filename)
    data_metafile = {
        "sha512": str(sha512.hexdigest()),
        "size": total_size,
        'filename': _request.headers["filename"],
        'mime-type': _request.headers["mime-type"],
        'media-info': json.loads(media_info.to_json())
    }
    tools.file_write_data(destination_filename + ".meta",
                          json.dumps(data_metafile, sort_keys=True, indent=4))
    answer_data = {
        "size": total_size,
        "sha512": str(sha512.hexdigest()),
        'filename': _request.headers["filename"],
        'mime-type': _request.headers["mime-type"],
        # BUG FIX: the file did NOT exist before this request, so report False
        # (the original copy/pasted True from the early-return branch)
        "already_exist": False,
    }
    await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
def un_init(): global exit_flag # Notify threads it's time to exit exit_flag = True if processor_availlable > 1: # Wait for all threads to complete for tmp in threads: debug.verbose("join thread ...") tmp.join() debug.verbose("Exiting ALL Threads")
def create_zip_file(files, base_output, outputFile):
    """Create a zip archive at *outputFile* containing the given files.

    :param files: a single path or a list of file paths to add
    :param base_output: prefix path of the entries inside the archive
    :param outputFile: path of the .zip to create (parent dirs are created)
    """
    debug.debug("Create Zip : '" + outputFile + "'")
    tools.create_directory_of_file(outputFile)
    debug.debug(" from '" + str(files) + "'")
    if tools.get_type_string(files) == "string":
        files = [files]
    zf = zipfile.ZipFile(outputFile, mode='w')
    for elem in files:
        # NOTE(review): elem[len(os.path.dirname(elem)):] keeps the leading
        # path separator, so entries look like "base_output//name" — confirm
        # this archive layout is intended before replacing with basename().
        debug.verbose(" ADD zip = " + str(elem) + " ==> " + base_output + "/" + elem[len(os.path.dirname(elem)):])
        zf.write(elem, base_output + "/" + elem[len(os.path.dirname(elem)):])
    zf.close()
def file_read_data(path, binary=False):
    """Read a whole file and return its content ("" when the file is missing).

    :param path: path of the file to read
    :param binary: when True, read in binary mode and return bytes
    :return: the file content, or "" if *path* is not a regular file
    """
    debug.verbose("path= " + path)
    if not os.path.isfile(path):
        return ""
    # context manager guarantees the handle is closed even if read() raises
    mode = "rb" if binary == True else "r"
    with open(path, mode) as file:
        data_file = file.read()
    return data_file
def get_element_name(_path):
    """Extract the element name from a file path shaped "<base><name>.py".

    :param _path: path of the candidate python file
    :return: the element name, or None when the basename is too short or does
             not start with the expected __base_element_name prefix
    """
    base_name = os.path.basename(_path)
    debug.verbose(" path: '" + _path + "' ==> basename='" + base_name + "'")
    if len(base_name) <= 3 + len(__base_element_name):
        # reject it, too small (must hold prefix + at least 1 char + ".py")
        return None
    # strip the trailing ".py"
    base_name = base_name[:-3]
    if base_name[:len(__base_element_name)] != __base_element_name:
        # reject it, wrong start file
        return None
    debug.verbose(" ==> '" + base_name[len(__base_element_name):] + "'")
    return base_name[len(__base_element_name):]
def store_command(cmd_line, file):
    """Persist a command line into *file* (no-op when file is empty/None).

    :param cmd_line: full command line to store
    :param file: destination path; "" or None disables storage
    """
    # write cmd line only after to prevent errors ...
    if file == "" \
       or file is None:
        return
    debug.verbose("create cmd file: " + file)
    # Create directory:
    create_directory_of_file(file)
    # Store the command Line; "with" flushes and closes even on write error
    with open(file, "w") as cmd_file:
        cmd_file.write(cmd_line)
def load_macro(name):
    """Import and return the macro module registered under *name*.

    :param name: macro name previously registered in __macro_list
    :return: the imported macro module
    :raises KeyError: when no macro with this name is registered
    """
    global __macro_list
    debug.debug("load macro: " + name)
    if len(__macro_list) == 0:
        debug.error("No macro to compile !!!")
    debug.debug("list macro: " + str(__macro_list))
    for mod in __macro_list:
        if mod[0] == name:
            # make the macro's directory importable, then import by module name
            debug.verbose("add to path: '" + os.path.dirname(mod[1]) + "'")
            sys.path.append(os.path.dirname(mod[1]))
            debug.verbose("import macro : '" + env.get_build_system_base_name() + __start_macro_name + name + "'")
            the_macro = __import__(env.get_build_system_base_name() + __start_macro_name + name)
            return the_macro
    raise KeyError("No entry for : " + name)
def run_in_pool(cmd_line, comment, store_cmd_line="", store_output_file="", depend_data=None):
    """Run a command, synchronously when single-processor, otherwise by
    queueing it on the worker-thread pool.

    :param cmd_line: command line to execute
    :param comment: 4-element display list forwarded to debug.print_element
    :param store_cmd_line: optional path where the command line is stored
    :param store_output_file: optional path where the command output is stored
    :param depend_data: optional dependency-file data forwarded to run_command
    """
    global current_id_execution
    if processor_availlable <= 1:
        # synchronous fallback: no pool, execute immediately
        debug.print_element(comment[0], comment[1], comment[2], comment[3])
        run_command(cmd_line, store_cmd_line, file=comment[3], store_output_file=store_output_file, depend_data=depend_data)
        return
    # multithreaded mode
    init()
    # Fill the queue under lock; workers pop entries concurrently
    queue_lock.acquire()
    debug.verbose("add : in pool cmd_line")
    work_queue.put(["cmd_line", cmd_line, comment, store_cmd_line, current_id_execution, store_output_file, depend_data])
    current_id_execution +=1;
    queue_lock.release()
def exist(lib_name, list_target_name, target) :
    """Check whether system library *lib_name* is available for one of the
    given targets, lazily importing its System module when needed.

    Targets are inspected in reverse order so the most specific entry wins.

    :param lib_name: name of the system library to look for
    :param list_target_name: target names, from generic to specific
    :param target: target object passed to the System constructor
    :return: True when the library exists on the system, False otherwise
    """
    global __system_list
    debug.verbose("exist= " + lib_name + " in " + str(list_target_name))
    find_target = False
    for target_name in list_target_name:
        if target_name in __system_list:
            find_target = True
    if find_target == False:
        return False
    for target_name in reversed(list_target_name):
        if target_name not in __system_list:
            continue
        for data in __system_list[target_name]:
            if data["name"] == lib_name:
                # we find it in the List ==> need to check if it is present in the system :
                # NOTE(review): data["loaded"] never appears to be set True in
                # this block, so the import may re-run on each call — confirm
                # it is updated elsewhere in the file.
                if data["loaded"] == False:
                    debug.verbose("add to path: '" + os.path.dirname(data["path"]) + "'")
                    sys.path.append(os.path.dirname(data["path"]))
                    debug.verbose("import system : '" + data["name"] + "'")
                    the_system = __import__(env.get_build_system_base_name() + __start_system_name + target_name + "_" + data["name"])
                    #create the system module
                    debug.verbose("SYSTEM: request: " + str(data["name"]))
                    if "System" in dir(the_system):
                        data["system"] = the_system.System(target)
                        data["exist"] = data["system"].get_valid()
                        """
                        if data["exist"] == False:
                            debug.warning("Can not Import: '" + data["name"] + "' ==> disabled")
                        """
                    else:
                        debug.warning("Not find: '" + data["name"] + "' ==> get exception")
                return data["exist"]
    return False
def init():
    """Parse the worktree and import every build-system description file.

    Collects candidate folders (the tool's own path, flagged as "system",
    plus every subfolder of the current directory except android/out),
    gathers the build files in one pass, then feeds them to the
    builder/module/system/target/macro registries.
    Idempotent: guarded by the module-level is_init flag.
    """
    global is_init;
    if is_init == True:
        return
    debug.verbose("Use Make as a make stadard")
    sys.path.append(tools.get_run_path())
    # create the list of basic folder:
    basic_folder_list = []
    basic_folder_list.append([tools.get_current_path(__file__), True])
    # Import all sub path without out and archive
    for elem_path in os.listdir("."):
        if os.path.isdir(elem_path) == False:
            continue
        if elem_path.lower() == "android" \
           or elem_path == "out" :
            continue
        debug.debug("Automatic load path: '" + elem_path + "'")
        basic_folder_list.append([elem_path, False])
    # create in a single path the basic list of lutin files (all start with lutin and end with .py)
    exclude_path = env.get_exclude_search_path()
    limit_sub_folder = env.get_parse_depth()
    list_of_lutin_files = []
    for elem_path, is_system in basic_folder_list:
        if is_system == True:
            # system path: effectively unlimited recursion depth
            limit_sub_folder_tmp = 999999
        else:
            limit_sub_folder_tmp = limit_sub_folder
        tmp_out = import_path_local(elem_path,
                                    limit_sub_folder_tmp,
                                    exclude_path,
                                    env.get_build_system_base_name())
        # add all the elements:
        for elem in tmp_out:
            list_of_lutin_files.append(elem)
    debug.debug("Files specific lutin: ")
    for elem_path in list_of_lutin_files:
        debug.debug(" " + elem_path)
    # simply import element from the basic list of files (single parse ...)
    builder.import_path(list_of_lutin_files)
    module.import_path(list_of_lutin_files)
    system.import_path(list_of_lutin_files)
    target.import_path(list_of_lutin_files)
    macro.import_path(list_of_lutin_files)
    builder.init()
    is_init = True
def load_target(name, config):
    """Import the target module registered under *name* and instantiate it.

    :param name: target name previously registered in __target_list
    :param config: configuration dict passed to the Target constructor
    :return: the created Target instance
    :raises KeyError: when no target with this name is registered
    """
    global __target_list
    debug.debug("load target: " + name)
    if len(__target_list) == 0:
        debug.error("No target to compile !!!")
    debug.debug("list target: " + str(__target_list))
    for mod in __target_list:
        if mod[0] == name:
            # make the target's directory importable, then import by module name
            debug.verbose("add to path: '" + os.path.dirname(mod[1]) + "'")
            sys.path.append(os.path.dirname(mod[1]))
            debug.verbose("import target : '" + env.get_build_system_base_name() + __start_target_name + name + "'")
            theTarget = __import__(env.get_build_system_base_name() + __start_target_name + name)
            #create the target
            tmpTarget = theTarget.Target(config)
            return tmpTarget
    raise KeyError("No entry for : " + name)
def create_zip(path, outputFile):
    """Zip the content of one or several directory trees into *outputFile*.

    Archive entry names are the file paths with the input directory prefix
    stripped.

    :param path: a directory path or a list of directory paths
    :param outputFile: path of the .zip to create (parent dirs are created)
    """
    debug.debug("Create Zip : '" + outputFile + "'")
    tools.create_directory_of_file(outputFile)
    debug.debug(" from '" + str(path) + "'")
    if tools.get_type_string(path) == "string":
        path = [path]
    zf = zipfile.ZipFile(outputFile, mode='w')
    for elem in path:
        # length of the prefix to strip from every walked file path
        basePathlen = len(elem)
        for root, dirnames, filenames in os.walk(elem):
            # List all files :
            for filename in filenames:
                file = os.path.join(root, filename)
                debug.verbose(" ADD zip = " + str(file) + " ==> " +file[basePathlen:])
                zf.write(file, file[basePathlen:])
    zf.close()
def create_number_from_version_string(self, data):
    """Pack a version string like "1.2.3" (or "1.2.3-dev") into one integer:
    major*1000000 + minor*1000 + patch.

    :param data: version string; a "-suffix" part is ignored and the dotted
                 list is padded/truncated to exactly 3 components
    :return: the packed version number as an int
    """
    # drop any "-suffix" (e.g. "1.2.3-dev" ==> "1.2.3")
    tmp_data = data.split("-")
    if len(tmp_data) > 1:
        data = tmp_data[0]
    # split components (renamed from "list": do not shadow the builtin)
    elements = data.split(".")
    if len(elements) == 1:
        elements.append("0")
    if len(elements) == 2:
        elements.append("0")
    if len(elements) > 3:
        elements = elements[:3]
    out = 0
    offset = 1000**(len(elements)-1)
    for elem in elements:
        out += offset*int(elem)
        debug.verbose("get : " + str(int(elem)) + " tmp" + str(out))
        # BUG FIX: use integer division; "offset /= 1000" turned offset (and
        # therefore the result) into a float under Python 3
        offset //= 1000
    return out
def compile(file, binary, target, depancy, flags, path, name, basic_path, module_src):
    """Generate the JNI C header for a java class with ``javah`` through the
    command pool, rebuilding only when the dependency check says so.

    :param file: java class file path relative to basic_path (".class" assumed)
    :param binary: unused here; kept for the generic builder interface
    :param target: build target providing paths and the javah tool
    :param depancy: dependency description (its src list feeds the dep files)
    :param flags: unused here; kept for the generic builder interface
    :param path: unused here; kept for the generic builder interface
    :param name: module name
    :param basic_path: module source root
    :param module_src: module source list used for dependency-file creation
    :return: dict {"action":"path", "path": generated-code directory}
    """
    # file_src = target.get_full_name_source(basic_path, file)
    file_cmd = target.get_full_name_cmd(name, basic_path, file)
    # file_dst = target.get_full_name_destination(name, basic_path, file, get_output_type())
    file_depend = target.get_full_dependency(name, basic_path, file)
    file_warning = target.get_full_name_warning(name, basic_path, file)
    depend_files = create_dependency_files(target, module_src, depancy.src['src'], basic_path)
    # create the command line befor requesting start:
    cmd = [
        target.javah,
        "-d", target.get_build_path(name) + target.path_generate_code
        ]
    if debug.get_level() >= 5:
        cmd.append("-verbose")
    cmd.append("-classpath")
    cmd.append(target.get_build_path_object(name))
    # strip the trailing ".class": javah takes the class name, not the file
    class_to_build = file[:-6]
    cmd.append(class_to_build)
    # Create cmd line
    cmd_line = tools.list_to_str(cmd)
    file_dst = target.get_build_path(name) + "/generate_header/" + class_to_build.replace(".", "_") + ".h"
    # check the dependency for this file :
    if depend.need_re_build(file_dst, None, file_depend, file_cmd, cmd_line) == False:
        return {"action":"path", "path":target.get_build_path(name) + target.path_generate_code}
    #tools.create_directory_of_file(file_dst)
    comment = ["javah", class_to_build.replace(".", "_") + ".h", "<==", class_to_build]
    #process element
    multiprocess.run_in_pool(cmd_line, comment, file_cmd, store_output_file = file_warning, depend_data = {"file":file_depend, "data":depend_files})
    debug.verbose("file= " + file_dst)
    #return file_dst
    return {"action":"path", "path":target.get_build_path(name) + target.path_generate_code}
def make_package_binary_data(self, path_package, pkg_name, base_pkg_path, heritage_list, static):
    """Copy the shared data of every heritage module into the package data folder.

    In static mode all data goes under "<data>/<pkg_name>"; otherwise the
    parent of each module's data path is copied so modules keep their own
    subfolder.

    :param path_package: root path of the package being assembled
    :param pkg_name: package name (data subfolder name in static mode)
    :param base_pkg_path: unused here; kept for interface symmetry with siblings
    :param heritage_list: dependency heritage providing the module names
    :param static: whether the package is statically linked
    :return: True when at least one file was copied or removed, False otherwise
    """
    debug.debug("make_package_binary_data [START]")
    target_shared_path = os.path.join(path_package, self.pkg_path_data)
    if static == True:
        path_package_data = os.path.join(target_shared_path, pkg_name)
    else:
        path_package_data = target_shared_path
    tools.create_directory_of_file(path_package_data)
    # prepare list of copy files
    copy_list={}
    debug.debug("heritage for " + str(pkg_name) + ":")
    for heritage in heritage_list.list_heritage:
        debug.debug("sub elements: " + str(heritage.name))
        path_src = self.get_build_path_data(heritage.name)
        debug.verbose(" has directory: " + path_src)
        if os.path.isdir(path_src):
            if static == True:
                debug.debug(" need copy: " + path_src + " to " + path_package_data)
                #copy all data:
                tools.copy_anything(path_src,
                                    path_package_data,
                                    recursive=True,
                                    force_identical=True,
                                    in_list=copy_list)
            else:
                debug.debug(" need copy: " + os.path.dirname(path_src) + " to " + path_package_data)
                #copy all data:
                tools.copy_anything(os.path.dirname(path_src),
                                    path_package_data,
                                    recursive=True,
                                    force_identical=True,
                                    in_list=copy_list)
    #real copy files
    ret_copy = tools.copy_list(copy_list)
    # remove unneded files (NOT folder ...)
    ret_remove = tools.clean_directory(target_shared_path, copy_list)
    debug.debug("make_package_binary_data [STOP]")
    return ret_copy or ret_remove
def run_command_direct(cmd_line):
    """Run a command synchronously and return its stripped stdout.

    :param cmd_line: the command line to execute (tokenized with shlex)
    :return: stdout without its trailing newline on success (stderr when
             stdout is empty), or False when the command returned non-zero
    """
    # prepare command line:
    args = shlex.split(cmd_line)
    debug.verbose("cmd = " + str(args))
    try:
        # create the subprocess
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        debug.error("subprocess.CalledProcessError : " + str(args))
    except Exception:
        # narrowed from a bare "except:" so SystemExit / KeyboardInterrupt
        # are no longer swallowed here
        debug.error("Exception on : " + str(args))
    # launch the subprocess:
    output, err = p.communicate()
    if sys.version_info >= (3, 0):
        output = output.decode("utf-8")
        err = err.decode("utf-8")
    # Check error :
    if p.returncode == 0:
        if output is None:
            return err[:-1]
        return output[:-1]
    else:
        debug.warning("get an error cmd " + str(err))
        return False
def import_path(path_list):
    """Register every system description file found in *path_list*.

    File names are expected to look like "<base><start><target>_<name>.py";
    matching files are recorded (not imported yet) in __system_list, grouped
    by target type.

    :param path_list: candidate python file paths
    """
    global __system_list
    global_base = env.get_build_system_base_name()
    debug.debug("SYSTEM: Init with Files list:")
    for elem in path_list:
        sys.path.append(os.path.dirname(elem))
        # Get file name:
        filename = os.path.basename(elem)
        # Remove .py at the end:
        filename = filename[:-3]
        # Remove global base name:
        filename = filename[len(global_base):]
        # Check if it start with the local patern:
        if filename[:len(__start_system_name)] != __start_system_name:
            debug.extreme_verbose("SYSTEM: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
            continue
        # Remove local patern
        system_name = filename[len(__start_system_name):]
        # ROBUSTNESS FIX: split only on the FIRST '_' so system names may
        # themselves contain underscores (plain split('_') raised ValueError
        # on more than one underscore)
        system_type, system_name = system_name.split('_', 1)
        debug.verbose("SYSTEM: Integrate: '" + system_type + "':'" + system_name + "' from '" + elem + "'")
        if system_type in __system_list:
            __system_list[system_type].append({"name":system_name, "path":elem, "system":None, "loaded":False, "exist":False, "module":None})
        else:
            __system_list[system_type] = [{"name":system_name, "path":elem, "system":None, "loaded":False, "exist":False, "module":None}]
    debug.verbose("New list system: ")
    for elem in __system_list:
        debug.verbose(" " + str(elem))
        for val in __system_list[elem]:
            debug.verbose(" " + str(val["name"]))
def run(self):
    """Worker-thread main loop: pop commands from the shared queue and run
    them until the global exit_flag is raised.

    Keeps the global current_thread_working counter in sync so the pool can
    tell when every worker is idle (see pool_synchrosize).
    """
    debug.verbose("Starting " + self.name)
    global exit_flag
    global current_thread_working
    # whether this thread is currently counted in current_thread_working
    working_set = False
    while exit_flag == False:
        self.lock.acquire()
        if not self.queue.empty():
            if working_set == False:
                # mark this thread busy (counter mutated under self.lock)
                current_thread_working += 1
                working_set = True
            data = self.queue.get()
            self.lock.release()
            debug.verbose(self.name + " processing '" + data[0] + "'")
            if data[0]=="cmd_line":
                comment = data[2]
                cmd_line = data[1]
                cmd_store_file = data[3]
                debug.print_element("[" + str(data[4]) + "][" + str(self.thread_id) + "] " + comment[0],
                                    comment[1],
                                    comment[2],
                                    comment[3])
                run_command(cmd_line,
                            cmd_store_file,
                            build_id=data[4],
                            file=comment[3],
                            store_output_file=data[5],
                            depend_data=data[6])
            else:
                debug.warning("unknow request command : " + data[0])
        else:
            if working_set==True:
                # queue drained: mark this thread idle again
                current_thread_working -= 1
                working_set=False
            # no element to parse, just wait ...
            self.lock.release()
            time.sleep(0.2)
    # kill requested ...
    debug.verbose("Exiting " + self.name)
def import_path(path_list):
    """Register every macro file found in *path_list* into __macro_list.

    File names are expected to look like "<base><start_macro><name>.py";
    matching files are recorded as [name, path] pairs for later load_macro().

    :param path_list: candidate python file paths
    """
    global __macro_list
    global_base = env.get_build_system_base_name()
    # LOG FIX: this registry handles MACROs — the message wrongly said "TARGET:"
    debug.debug("MACRO: Init with Files list:")
    for elem in path_list:
        sys.path.append(os.path.dirname(elem))
        # Get file name:
        filename = os.path.basename(elem)
        # Remove .py at the end:
        filename = filename[:-3]
        # Remove global base name:
        filename = filename[len(global_base):]
        # Check if it start with the local patern:
        if filename[:len(__start_macro_name)] != __start_macro_name:
            debug.extreme_verbose("MACRO: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
            continue
        # Remove local patern
        target_name = filename[len(__start_macro_name):]
        debug.verbose("MACRO: Integrate: '" + target_name + "' from '" + elem + "'")
        __macro_list.append([target_name, elem])
    debug.verbose("New list MACRO: ")
    for elem in __macro_list:
        debug.verbose(" " + str(elem[0]))
def make_package_binary(self, pkg_name, pkg_properties, base_pkg_path, heritage_list, static):
    """Build a complete signed & zipaligned Android .apk for *pkg_name*.

    Pipeline: stage shared data and libs, generate the android java project
    layout, compile resources (aapt) and R.java (javac), dex the classes
    (dx), assemble the apk (ApkBuilderMain), sign it (jarsigner, with the
    bundled debug key in debug mode), zipalign it, and copy the result to the
    final output path.

    :param pkg_name: name of the package to generate
    :param pkg_properties: package property dict (VERSION, COMPAGNY_*,
           ANDROID_MANIFEST, ANDROID_RESOURCES, optional ICON/ADMOD_ID/...)
    :param base_pkg_path: base package path (forwarded to sub-steps)
    :param heritage_list: dependency heritage (libs, jars, data to embed)
    :param static: whether the package links its modules statically
    """
    debug.debug("------------------------------------------------------------------------")
    debug.info("Generate package '" + pkg_name + "' v" + tools.version_to_string(pkg_properties["VERSION"]))
    debug.debug("------------------------------------------------------------------------")
    #output path
    target_outpath = self.get_staging_path(pkg_name)
    tools.create_directory_of_file(target_outpath)
    ## Create share datas:
    self.make_package_binary_data(target_outpath, pkg_name, base_pkg_path, heritage_list, static)
    ## copy binary files
    # in Android Package we have no binary element, only shared object ... (and java start file)
    ## Create libraries (special case of Android...)
    copy_list={}
    target_outpath_lib = os.path.join(target_outpath, self.pkg_path_lib)
    tools.create_directory_of_file(target_outpath_lib)
    # copy application lib: (needed to lunch ...)
    file_src = self.get_build_file_dynamic(pkg_name)
    if os.path.isfile(file_src):
        debug.debug(" need copy: " + file_src + " to " + target_outpath_lib)
        tools.copy_file(file_src, os.path.join(target_outpath_lib, os.path.basename(file_src)), in_list=copy_list)
    # copy other if needed:
    if static == False:
        #copy all shared libsh...
        debug.verbose("libs for " + str(pkg_name) + ":")
        for heritage in heritage_list.list_heritage:
            debug.debug("sub elements: " + str(heritage.name))
            file_src = self.get_build_file_dynamic(heritage.name)
            debug.verbose(" has directory: " + file_src)
            if os.path.isfile(file_src):
                debug.debug(" need copy: " + file_src + " to " + target_outpath_lib)
                #copy all data:
                # TODO : We can have a problem when writing over library files ...
                tools.copy_file(file_src, os.path.join(target_outpath_lib, os.path.basename(file_src)), in_list=copy_list)
    #real copy files
    tools.copy_list(copy_list)
    if self.pkg_path_lib != "":
        # remove unneded files (NOT folder ...)
        tools.clean_directory(target_outpath_lib, copy_list)
    ## Create generic files:
    self.make_package_generic_files(target_outpath, pkg_properties, pkg_name, base_pkg_path, heritage_list, static)
    ## create specific android project (local)
    pkg_name_application_name = pkg_name
    if self.config["mode"] == "debug":
        pkg_name_application_name += "debug"
    #debug.info("ploppppp: " + str(pkg_properties))
    # FINAL_path_JAVA_PROJECT
    self.path_java_project = os.path.join(target_outpath, "src")
    if pkg_properties["COMPAGNY_TYPE"] != "":
        self.path_java_project = os.path.join(self.path_java_project, pkg_properties["COMPAGNY_TYPE"])
    if pkg_properties["COMPAGNY_NAME2"] != "":
        self.path_java_project = os.path.join(self.path_java_project, pkg_properties["COMPAGNY_NAME2"])
    self.path_java_project = os.path.join(self.path_java_project, pkg_name_application_name)
    #FINAL_FILE_ABSTRACTION
    self.file_final_abstraction = os.path.join(self.path_java_project, pkg_name_application_name + ".java")
    # full reverse-DNS package name, e.g. "com.company.appname"
    compleatePackageName = ""
    if pkg_properties["COMPAGNY_TYPE"] != "":
        compleatePackageName += pkg_properties["COMPAGNY_TYPE"] + "."
    if pkg_properties["COMPAGNY_NAME2"] != "":
        compleatePackageName += pkg_properties["COMPAGNY_NAME2"] + "."
    compleatePackageName += pkg_name_application_name
    if "ADMOD_ID" in pkg_properties:
        # AdMob needs network permissions
        pkg_properties["RIGHT"].append("INTERNET")
        pkg_properties["RIGHT"].append("ACCESS_NETWORK_STATE")
    debug.print_element("pkg", "absractionFile", "<==", "dynamic file")
    # Create path :
    tools.create_directory_of_file(self.file_final_abstraction)
    # Create file :
    # java ==> done by ewol wrapper ... (and compiled in the normal compilation system ==> must be find in the dependency list of jar ...
    tools.create_directory_of_file(target_outpath + "/res/drawable/icon.png");
    if "ICON" in pkg_properties.keys() \
       and pkg_properties["ICON"] != "":
        image.resize(pkg_properties["ICON"], target_outpath + "/res/drawable/icon.png", 256, 256)
    else:
        # to be sure that we have all time a resource ...
        tmpFile = open(target_outpath + "/res/drawable/plop.txt", 'w')
        tmpFile.write('plop\n')
        tmpFile.flush()
        tmpFile.close()
    if pkg_properties["ANDROID_MANIFEST"]!="":
        debug.print_element("pkg", "AndroidManifest.xml", "<==", pkg_properties["ANDROID_MANIFEST"])
        tools.copy_file(pkg_properties["ANDROID_MANIFEST"], target_outpath + "/AndroidManifest.xml", force=True)
    else:
        debug.error("missing parameter 'ANDROID_MANIFEST' in the properties ... ")
    #add properties on wallpaper :
    # myModule.add_pkg("ANDROID_WALLPAPER_PROPERTIES", ["list", key, title, summary, [["key","value display"],["key2","value display 2"]])
    # myModule.add_pkg("ANDROID_WALLPAPER_PROPERTIES", ["list", "testpattern", "Select test pattern", "Choose which test pattern to display", [["key","value display"],["key2","value display 2"]]])
    # myModule.add_pkg("ANDROID_WALLPAPER_PROPERTIES", ["bool", key, title, summary, ["enable string", "disable String"])
    # myModule.add_pkg("ANDROID_WALLPAPER_PROPERTIES", ["bool", "movement", "Motion", "Apply movement to test pattern", ["Moving test pattern", "Still test pattern"]
    #copy needed resources :
    for res_source, res_dest in pkg_properties["ANDROID_RESOURCES"]:
        if res_source == "":
            continue
        tools.copy_file(res_source , target_outpath + "/res/" + res_dest + "/" + os.path.basename(res_source), force=True)
    # Doc :
    # http://asantoso.wordpress.com/2009/09/15/how-to-build-android-application-package-apk-from-the-command-line-using-the-sdk-tools-continuously-integrated-using-cruisecontrol/
    debug.print_element("pkg", "R.java", "<==", "Resources files")
    tools.create_directory_of_file(target_outpath + "/src/noFile")
    android_tool_path = self.path_sdk + "/build-tools/"
    # find android tool version
    dirnames = tools.get_list_sub_path(android_tool_path)
    if len(dirnames) == 0:
        debug.warning("This does not comport directory: '" + android_tool_path + "'")
        debug.error("An error occured when getting the tools for android")
    elif len(dirnames) > 1:
        # several build-tools versions installed: pick the newest one
        dirnames = sorted(dirnames, reverse=True)
        debug.debug("sort tools directory: '" + str(dirnames) + "' ==> select : " + str(dirnames[0]))
    android_tool_path += dirnames[0] + "/"
    # this is to create resource file for android ... (we did not use aset in jar with ewol ...
    adModResoucepath = ""
    if "ADMOD_ID" in pkg_properties:
        adModResoucepath = " -S " + self.path_sdk + "/extras/google/google_play_services/libproject/google-play-services_lib/res/ "
    cmdLine = android_tool_path + "aapt p -f " \
              + "-M " + target_outpath + "/AndroidManifest.xml " \
              + "-F " + target_outpath + "/resources.res " \
              + "-I " + self.path_sdk + "/platforms/android-" + str(self.board_id) + "/android.jar "\
              + "-S " + target_outpath + "/res/ " \
              + adModResoucepath \
              + "-J " + target_outpath + "/src/ "
    multiprocess.run_command(cmdLine)
    tools.create_directory_of_file(target_outpath + "/build/classes/noFile")
    debug.print_element("pkg", "*.class", "<==", "*.java")
    #generate android java files:
    filesString=""
    """ old :
    if "ADMOD_ID" in pkg_properties:
        # TODO : check this I do not think it is really usefull ... ==> write for IDE only ...
        filesString += self.path_sdk + "/extras/google/google_play_services/libproject/google-play-services_lib/src/android/UnusedStub.java "
    if len(pkg_properties["ANDROID_WALLPAPER_PROPERTIES"])!=0:
        filesString += self.path_java_project + pkg_name_application_name + "Settings.java "
    adModJarFile = ""
    if "ADMOD_ID" in pkg_properties:
        adModJarFile = ":" + self.path_sdk + "/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar"
    cmdLine = "javac " \
              + "-d " + self.get_staging_path(pkg_name) + "/build/classes " \
              + "-classpath " + self.path_sdk + "/platforms/android-" + str(self.board_id) + "/android.jar" \
              + adModJarFile + " " \
              + filesString \
              + self.file_final_abstraction + " " \
              + self.get_staging_path(pkg_name) + "/src/R.java "
    multiprocess.run_command(cmdLine)
    """
    debug.verbose("heritage .so=" + str(tools.filter_extention(heritage_list.src['dynamic'], ["so"])))
    debug.verbose("heritage .jar=" + str(tools.filter_extention(heritage_list.src['src'], ["jar"])))
    # build the java classpath from all inherited jars
    class_extern = ""
    upper_jar = tools.filter_extention(heritage_list.src['src'], ["jar"])
    #debug.warning("ploppppp = " + str(upper_jar))
    for elem in upper_jar:
        if len(class_extern) > 0:
            class_extern += ":"
        class_extern += elem
    # create enpoint element :
    cmdLine = "javac " \
              + "-d " + target_outpath + "/build/classes " \
              + "-classpath " + class_extern + " " \
              + target_outpath + "/src/R.java "
    multiprocess.run_command(cmdLine)
    debug.print_element("pkg", ".dex", "<==", "*.class")
    cmdLine = android_tool_path + "dx " \
              + "--dex --no-strict " \
              + "--output=" + target_outpath + "/build/" + pkg_name_application_name + ".dex " \
              + target_outpath + "/build/classes/ "
    if "ADMOD_ID" in pkg_properties:
        cmdLine += self.path_sdk + "/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar "
    # add element to dexification:
    for elem in upper_jar:
        # remove android sdk:
        if elem[-len("android.jar"):] != "android.jar":
            cmdLine += elem + " "
    multiprocess.run_command(cmdLine)
    debug.print_element("pkg", ".apk", "<==", ".dex, assets, .so, res")
    #builderDebug="-agentlib:jdwp=transport=dt_socket,server=y,address=8050,suspend=y "
    builderDebug=""
    # note : set -u not signed application...
    #+ ":" + self.path_sdk + "/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar "
    cmdLine = "java -Xmx128M " \
              + " -classpath " + self.path_sdk + "/tools/lib/sdklib.jar " \
              + builderDebug \
              + " com.android.sdklib.build.ApkBuilderMain " \
              + target_outpath + "/build/" + pkg_name_application_name + "-unalligned.apk " \
              + " -u " \
              + " -z " + target_outpath + "/resources.res " \
              + " -f " + target_outpath + "/build/" + pkg_name_application_name + ".dex " \
              + " -rf " + target_outpath + "/data "
    multiprocess.run_command(cmdLine)
    # doc :
    # http://developer.android.com/tools/publishing/app-signing.html
    # Create a key for signing your application:
    # keytool -genkeypair -v -keystore AndroidKey.jks -storepass Pass__AndroidDebugKey -alias alias__AndroidDebugKey -keypass PassKey__AndroidDebugKey -keyalg RSA -validity 36500
    if self.config["mode"] == "debug":
        debug.print_element("pkg", ".apk(signed debug)", "<==", ".apk (not signed)")
        # verbose mode :
        #debugOption = "-verbose -certs "
        debugOption = ""
        cmdLine = "jarsigner " \
                  + debugOption \
                  + "-keystore " + tools.get_current_path(__file__) + "/AndroidDebugKey.jks " \
                  + " -sigalg SHA1withRSA -digestalg SHA1 " \
                  + " -storepass Pass__AndroidDebugKey " \
                  + " -keypass PassKey__AndroidDebugKey " \
                  + target_outpath + "/build/" + pkg_name_application_name + "-unalligned.apk " \
                  + " alias__AndroidDebugKey"
        multiprocess.run_command(cmdLine)
        tmpFile = open("tmpPass.boo", 'w')
        tmpFile.write("\n")
        tmpFile.flush()
        tmpFile.close()
    else:
        print("On release mode we need the file : and key an pasword to sign the application ...")
        debug.print_element("pkg", ".apk(signed debug)", "<==", ".apk (not signed)")
        cmdLine = "jarsigner " \
                  + " -keystore " + pkg_properties["ANDROID_SIGN"] + " " \
                  + " -sigalg SHA1withRSA -digestalg SHA1 " \
                  + target_outpath + "/build/" + pkg_name_application_name + "-unalligned.apk " \
                  + " " + pkg_name_application_name
        multiprocess.run_command(cmdLine)
        cmdLine = "jarsigner " \
                  + " -verify -verbose -certs " \
                  + " -sigalg SHA1withRSA -digestalg SHA1 " \
                  + target_outpath + "/build/" + pkg_name_application_name + "-unalligned.apk "
        multiprocess.run_command(cmdLine)
    debug.print_element("pkg", ".apk(aligned)", "<==", ".apk (not aligned)")
    tools.remove_file(target_outpath + "/" + pkg_name_application_name + ".apk")
    # verbose mode : -v
    cmdLine = android_tool_path + "zipalign 4 " \
              + target_outpath + "/build/" + pkg_name_application_name + "-unalligned.apk " \
              + target_outpath + "/" + pkg_name_application_name + ".apk "
    multiprocess.run_command(cmdLine)
    # copy file in the final stage :
    tools.copy_file(target_outpath + "/" + pkg_name_application_name + ".apk",
                    self.get_final_path() + "/" + pkg_name_application_name + ".apk",
                    force=True)
def add_action(self, name_of_state="PACKAGE", level=5, name="no-name", action=None):
    """Register an action callback on a build state.

    Actions are stored per state as [level, name, action] triplets; several
    actions may be attached to the same state.
    """
    debug.verbose("add action : " + name)
    # create the state's list on first use, then append to it
    self.action_on_state.setdefault(name_of_state, []).append([level, name, action])
def import_path_local(path, limit_sub_folder, exclude_path = [], base_name = ""):
    """Recursively collect '<base_name>*.py' module files under *path*.

    :param path: directory to scan.
    :param limit_sub_folder: remaining recursion budget; 0 stops the scan.
    :param exclude_path: directories that must not be scanned.
    :param base_name: prefix of the module files to collect.
    :return: list of absolute-ish file paths (joined on *path*).
    """
    out = []
    debug.verbose("lutin files: " + str(path) + " [START]")
    if limit_sub_folder == 0:
        debug.debug("Subparsing limitation append ...")
        return []
    try:
        list_files = os.listdir(path)
    except:
        # an error occure, maybe read error ...
        debug.warning("error when getting subdirectory of '" + str(path) + "'")
        return []
    if path in exclude_path:
        debug.debug("find '" + str(path) + "' in exclude_path=" + str(exclude_path))
        return []
    # filter elements: keep only the build-description python files
    tmp_list_lutin_file = filter_name_and_file(path, list_files, base_name + "*.py")
    debug.verbose("lutin files: " + str(path) + " : " + str(tmp_list_lutin_file))
    # Import the module:
    for filename in tmp_list_lutin_file:
        out.append(os.path.join(path, filename))
        debug.extreme_verbose(" Find a file : '" + str(out[-1]) + "'")
    need_parse_sub_folder = True
    # NOTE(review): rm_value = -1 makes the recursive call below pass
    # `limit_sub_folder - (-1)` == limit + 1, i.e. the budget GROWS on the
    # default path and never reaches 0 -- confirm whether +1 is intended
    # (comment below says "add +1 in the resursing") or should decrement.
    rm_value = -1
    # check if we need to parse sub_folder
    if len(tmp_list_lutin_file) != 0:
        # a module file was found here: do not descend by default
        need_parse_sub_folder = False
    # check if the file "lutin_parse_sub.py" is present ==> parse SubFolder (force and add +1 in the resursing
    if base_name + "ParseSubFolders.txt" in list_files:
        debug.debug("find SubParser ... " + str(base_name + "ParseSubFolders.txt") + " " + path)
        data_file_sub = tools.file_read_data(os.path.join(path, base_name + "ParseSubFolders.txt"))
        if data_file_sub == "":
            # empty marker file: force scanning every sub-folder
            debug.debug(" Empty file Load all subfolder in the worktree in '" + str(path) + "'")
            need_parse_sub_folder = True
            rm_value = 0
        else:
            # the file lists the only sub-folders to scan (one per line)
            list_sub = data_file_sub.split("\n")
            debug.debug(" Parse selected folders " + str(list_sub) + " no parse local folder directory")
            need_parse_sub_folder = False
            for folder in list_sub:
                if folder == "" \
                   or folder == "/":
                    continue;
                # scan the listed folder with a budget of exactly 1 level
                tmp_out = import_path_local(os.path.join(path, folder), 1, exclude_path, base_name)
                # add all the elements:
                for elem in tmp_out:
                    out.append(elem)
    if need_parse_sub_folder == True:
        list_folders = filter_path(path, list_files)
        for folder in list_folders:
            tmp_out = import_path_local(os.path.join(path, folder), limit_sub_folder - rm_value, exclude_path, base_name)
            # add all the elements:
            for elem in tmp_out:
                out.append(elem)
    return out
def run_command(cmd_line, store_cmd_line="", build_id=-1, file="", store_output_file="", depend_data=None):
    """Run one shell command, report its output, and record failures.

    On success the optional dependency file is written and any compiler
    output is echoed.  On failure the module-level error state is set; in
    multiprocess mode (build_id >= 0) the failure details are stored in
    ``error_execution`` for ``pool_synchrosize`` to report later.

    :param cmd_line: full command line (split with shlex).
    :param store_cmd_line: where to persist the command line on success.
    :param build_id: multiprocess slot id; < 0 means single-process mode.
    :param file: source file name, used only for warning messages.
    :param store_output_file: destination for captured warnings.
    :param depend_data: optional {'file':..., 'data':...} dependency info.
    """
    global error_occured
    global exit_flag
    global current_id_execution
    global error_execution
    # prepare command line:
    args = shlex.split(cmd_line)
    debug.verbose("cmd = " + str(args))
    try:
        # create the subprocess
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        # NOTE(review): Popen does not raise CalledProcessError -- this
        # handler is dead; OSError/FileNotFoundError land in the bare
        # except below.  debug.error presumably aborts, otherwise `p`
        # would be undefined afterwards -- TODO confirm.
        debug.error("subprocess.CalledProcessError : TODO ...")
    except:
        debug.error("Exception on : " + str(args))
    # launch the subprocess:
    output, err = p.communicate()
    if sys.version_info >= (3, 0):
        # python3: communicate() returns bytes; decode for text handling
        output = output.decode("utf-8")
        err = err.decode("utf-8")
    # store error if needed:
    tools.store_warning(store_output_file, output, err)
    # Check error :
    if p.returncode == 0:
        debug.debug(env.print_pretty(cmd_line))
        # lock: dependency-file write and console output must not interleave
        queue_lock.acquire()
        if depend_data != None:
            depend.create_dependency_file(depend_data['file'], depend_data['data'])
        # TODO : Print the output all the time .... ==> to show warnings ...
        if build_id >= 0 and (output != "" or err != ""):
            debug.warning("output in subprocess compiling: '" + file + "'")
            if output != "":
                debug.print_compilator(output)
            if err != "":
                debug.print_compilator(err)
        queue_lock.release()
    else:
        error_occured = True
        exit_flag = True
        # if No ID : Not in a multiprocess mode ==> just stop here
        if build_id < 0:
            debug.debug(env.print_pretty(cmd_line), force=True)
            debug.print_compilator(output)
            debug.print_compilator(err)
            if p.returncode == 2:
                debug.error("can not compile file ... [keyboard interrrupt]")
            else:
                debug.error("can not compile file ... ret : " + str(p.returncode))
        else:
            # in multiprocess interface
            queue_lock.acquire()
            # if an other write an error before, check if the current process is started before ==> then is the first error
            if error_execution["id"] >= build_id:
                # nothing to do ...
                # NOTE(review): this keeps the error with the LARGER id;
                # the comment above suggests the earliest error should
                # win -- confirm the intended comparison direction.
                queue_lock.release()
                return;
            error_execution["id"] = build_id
            error_execution["cmd"] = cmd_line
            error_execution["return"] = p.returncode
            # trailing commas are deliberate: they wrap err/output in
            # 1-tuples, and pool_synchrosize() reads them back with
            # error_execution["err"][0] / ["out"][0]
            error_execution["err"] = err,
            error_execution["out"] = output,
            queue_lock.release()
            # not write the command file...
            return
    debug.verbose("done 3")
    # write cmd line only after to prevent errors ...
    tools.store_command(cmd_line, store_cmd_line)
def import_path_local(path, limit_sub_folder, exclude_path=[], base_name=""):
    """Recursively collect '<base_name>*.py' module files under *path*.

    :param path: directory to scan.
    :param limit_sub_folder: remaining recursion budget; 0 stops the scan.
    :param exclude_path: directories that must not be scanned.
    :param base_name: prefix of the module files to collect.
    :return: list of file paths (joined on *path*).
    """
    out = []
    debug.verbose("qworktree files: " + str(path) + " [START]")
    if limit_sub_folder == 0:
        debug.debug("Subparsing limitation append ...")
        return []
    # FIX: a stray unguarded `list_files = os.listdir(path)` used to sit
    # before this try block -- any read error raised there instead of
    # being caught, defeating the warning+return fallback below.
    try:
        list_files = os.listdir(path)
    except:
        # an error occure, maybe read error ...
        debug.warning("error when getting subdirectory of '" + str(path) + "'")
        return []
    if path in exclude_path:
        debug.debug("find '" + str(path) + "' in exclude_path=" + str(exclude_path))
        return []
    # filter elements: keep only the build-description python files
    tmp_list_qworktree_file = filter_name_and_file(path, list_files, base_name + "*.py")
    debug.verbose("qworktree files: " + str(path) + " : " + str(tmp_list_qworktree_file))
    # Import the module:
    for filename in tmp_list_qworktree_file:
        out.append(os.path.join(path, filename))
        debug.extreme_verbose(" Find a file : '" + str(out[-1]) + "'")
    need_parse_sub_folder = True
    # NOTE(review): unlike the sibling lutin variant, this version does NOT
    # stop descending when module files are found here -- confirm whether
    # that divergence is intended.
    rm_value = -1
    # check if the file "qworktree_parse_sub.py" is present ==> parse SubFolder (force and add +1 in the resursing
    if base_name + "ParseSubFolders.txt" in list_files:
        debug.debug("find SubParser ... " + str(base_name + "ParseSubFolders.txt") + " " + path)
        data_file_sub = tools.file_read_data(os.path.join(path, base_name + "ParseSubFolders.txt"))
        if data_file_sub == "":
            # empty marker file: force scanning every sub-folder
            debug.debug(" Empty file Load all subfolder in the worktree in '" + str(path) + "'")
            need_parse_sub_folder = True
            rm_value = 0
        else:
            # the file lists the only sub-folders to scan (one per line)
            list_sub = data_file_sub.split("\n")
            debug.debug(" Parse selected folders " + str(list_sub) + " no parse local folder directory")
            need_parse_sub_folder = False
            for folder in list_sub:
                if folder == "" \
                   or folder == "/":
                    continue
                # scan the listed folder with a budget of exactly 1 level
                tmp_out = import_path_local(os.path.join(path, folder), 1, exclude_path, base_name)
                # add all the elements:
                for elem in tmp_out:
                    out.append(elem)
    if need_parse_sub_folder == True:
        list_folders = filter_path(path, list_files)
        for folder in list_folders:
            tmp_out = import_path_local(os.path.join(path, folder), limit_sub_folder - rm_value, exclude_path, base_name)
            # add all the elements:
            for elem in tmp_out:
                out.append(elem)
    return out
def push_video_file(_path, _basic_key=None):
    """Upload one media file to the local video service and register its metadata.

    Streams the raw file to ``http://127.0.0.1:15080/data``, parses
    series/season/episode information out of the file name, creates the
    group/saison records if they do not exist yet, then POSTs the final
    media description to ``/video``.

    :param _path: path of the media file to send.
    :param _basic_key: metadata accumulator (mutated in place); expected to
                       already hold a "type" key -- TODO confirm callers.
    :return: True when the file was handled (sent, or deliberately skipped
             as a sha512/cover artifact), False when it is rejected.
    """
    # FIX: previous signature used a mutable default ({}) that was mutated
    # below and therefore shared between calls.
    if _basic_key is None:
        _basic_key = {}
    file_name, file_extension = os.path.splitext(_path)
    debug.info("Send file: '" + file_name + "' with extention " + file_extension)
    # internal file_extension ...
    if file_extension == "sha512":
        debug.verbose("file: '" + _path + "' sha512 extention ...")
        return True
    debug.info("Add media : '" + _path + "'")
    if file_extension[1:] not in ["avi", "mkv", "mov", "mp4", "ts"] \
       and file_name not in ["cover_1.jpg","cover_1.png", "cover_1.till", "cover_1.bmp", "cover_1.tga"]:
        debug.warning("Not send file : " + _path + " Not manage file_extension... " + file_extension)
        return False
    if file_name in [ "cover_1.jpg", "cover_1.png", "cover_1.till", "cover_1.bmp", "cover_1.tga" ]:
        # cover images are recognized but not handled by this client yet
        debug.warning("Not send cover Not managed ... : " + _path + " Not manage ...")
        return True
    # (large blocks of dead zeus-client pseudo-code, previously held in
    # inert triple-quoted string literals, were removed here; they had no
    # runtime effect.)
    mime = magic.Magic(mime=True)
    mime_type = mime.from_file(_path)
    headers_values = {'filename': _path, 'mime-type': mime_type}
    result_send_data = requests.post("http://127.0.0.1:15080/data", headers=headers_values, data=upload_in_chunks(_path, chunksize=4096))
    debug.info("result *********** : " + str(result_send_data) + " " + result_send_data.text)
    file_name = os.path.basename(file_name)
    debug.info("Find file_name : '" + file_name + "'")
    # Extract "(...)" groups from the name: a leading digit means a date,
    # anything else is treated as a secondary title.
    file_name, dates = extract_and_remove(file_name, '(', ')')
    have_date = False
    have_Title = False
    for it in dates:
        if len(it) == 0:
            continue
        if it[0] in "0123456789":
            # find a date ...
            if have_date == True:
                debug.info(" '" + file_name + "'")
                debug.error("Parse Date error : () : " + it + " ==> multiple date")
                continue
            have_date = True
            _basic_key["date"] = it
        else:
            if have_Title == True:
                debug.info(" '" + file_name + "'")
                debug.error("Parse Title error : () : " + it + " ==> multiple title")
                continue
            have_Title = True
            # Other title
            # FIX: was `_basic_key.set["title2"] = it` -- dict has no
            # attribute `set`, this raised AttributeError at runtime.
            _basic_key["title2"] = it
    # Remove the actors [XXX YYY][EEE TTT]...
    file_name, actors = extract_and_remove(file_name, '[', ']')
    if len(actors) > 0:
        debug.info(" '" + file_name + "'")
        # FIX: the old loop compared a list to "" (always true) and did
        # `list += ";"`, splicing a ';' character between entries; store a
        # clean list of actor names instead.
        _basic_key["actors"] = list(actors)
    list_element_base = file_name.split('-')
    debug.warning("==> Title file: " + file_name)
    debug.warning("==> Title cut : " + str(list_element_base))
    # Re-assemble the leading free-text part (joined back with '-') until a
    # 's...'/'e...' marker is met; everything from the marker on is kept as
    # separate elements.
    list_element = []
    tmp_start_string = ""
    iii = 0
    while iii < len(list_element_base):
        if list_element_base[iii][0] != 's' \
           and list_element_base[iii][0] != 'e':
            if tmp_start_string != "":
                tmp_start_string += '-'
            tmp_start_string += list_element_base[iii]
        else:
            list_element.append(tmp_start_string)
            tmp_start_string = ""
            while iii < len(list_element_base):
                list_element.append(list_element_base[iii])
                iii += 1
        iii += 1
    debug.warning("==> start elem: " + str(tmp_start_string))
    if tmp_start_string != "":
        list_element.append(tmp_start_string)
    debug.warning("==> list_element : " + str(list_element))
    if len(list_element) == 1:
        # nothing to do , it might be a film ...
        _basic_key["title"] = list_element[0]
    else:
        if len(list_element) > 3 \
           and list_element[1][0] == 's' \
           and list_element[2][0] == 'e':
            debug.warning("Parse format: xxx-sXX-eXX-kjhlkjlkj(1234).*")
            # internal formalisme ...
            saison = -1
            episode = -1
            series_name = list_element[0]
            _basic_key["series-name"] = series_name
            full_episode_name = list_element[3]
            for yyy in range(4, len(list_element)):
                full_episode_name += "-" + list_element[yyy]
            _basic_key["title"] = full_episode_name
            if list_element[1][1:] == "XX":
                # saison unknow ... ==> nothing to do ...
                pass
            else:
                saison = int(list_element[1][1:])
            if list_element[2][1:] == "XX":
                # episode unknow ... ==> nothing to do ...
                pass
            else:
                episode = int(list_element[2][1:])
                _basic_key["episode"] = int(episode)
            debug.info("Find a internal mode series: :")
            debug.info(" origin : '" + file_name + "'")
            saisonPrint = "XX"
            episodePrint = "XX"
            if saison < 0:
                # nothing to do
                pass
            else:
                saisonPrint = str(saison)
                _basic_key["saison"] = saison
            if episode < 0:
                # nothing to do
                pass
            elif episode < 10:
                # zero-pad single-digit episode numbers for display
                episodePrint = "0" + str(episode)
                _basic_key["episode"] = episode
            else:
                episodePrint = str(episode)
                _basic_key["episode"] = episode
            debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'")
        elif len(list_element) > 2 \
             and list_element[1][0] == 'e':
            debug.warning("Parse format: xxx-eXX-kjhlkjlkj(1234).*")
            # internal formalisme ...
            saison = -1
            episode = -1
            series_name = list_element[0]
            _basic_key["series-name"] = series_name
            full_episode_name = list_element[2]
            for yyy in range(3, len(list_element)):
                full_episode_name += "-" + list_element[yyy]
            _basic_key["title"] = full_episode_name
            if list_element[1][1:] == "XX":
                # episode unknow ... ==> nothing to do ...
                pass
            else:
                episode = int(list_element[1][1:])
                _basic_key["episode"] = int(episode)
            debug.info("Find a internal mode series: :")
            debug.info(" origin : '" + file_name + "'")
            saisonPrint = "XX"
            episodePrint = "XX"
            if episode < 0:
                # nothing to do
                pass
            elif episode < 10:
                episodePrint = "0" + str(episode)
                _basic_key["episode"] = episode
            else:
                episodePrint = str(episode)
                _basic_key["episode"] = episode
            debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'")
    result_send_data_json = json.loads(result_send_data.text)
    debug.info("pared meta data: " + json.dumps(_basic_key, sort_keys=True, indent=4))
    data_model = {
        "type_id": _basic_key["type"],
        "sha512": result_send_data_json["sha512"],
        #"group_id": int,
        "name": _basic_key["title"],
        # number of second
        "time": None,
        }
    for elem in ["date", "description", "episode" ]: #["actors", "date", "description", "episode", "title2"]:
        if elem in _basic_key.keys():
            data_model[elem] = _basic_key[elem]
    if "series-name" in _basic_key.keys():
        # find-or-create the series group
        result_group_data = requests.post("http://127.0.0.1:15080/group/find", data=json.dumps({"name": _basic_key["series-name"]}, sort_keys=True, indent=4))
        debug.info("Create group ??? *********** : " + str(result_group_data) + " " + result_group_data.text)
        if result_group_data.status_code == 404:
            result_group_data = requests.post("http://127.0.0.1:15080/group", data=json.dumps({"name": _basic_key["series-name"]}, sort_keys=True, indent=4))
            debug.info("yes we create new group *********** : " + str(result_group_data) + " " + result_group_data.text)
        group_id = result_group_data.json()["id"]
        data_model["group_id"] = group_id
        if "saison" in _basic_key.keys():
            # find-or-create the season within the group
            result_saison_data = requests.post("http://127.0.0.1:15080/saison/find", data=json.dumps({ "number": _basic_key["saison"], "group_id": group_id }, sort_keys=True, indent=4))
            debug.info("Create saison ??? *********** : " + str(result_saison_data) + " " + result_saison_data.text)
            if result_saison_data.status_code == 404:
                result_saison_data = requests.post("http://127.0.0.1:15080/saison", data=json.dumps({ "number": _basic_key["saison"], "group_id": group_id }, sort_keys=True, indent=4))
                debug.info("yes we create new saison *********** : " + str(result_saison_data) + " " + result_saison_data.text)
            saison_id = result_saison_data.json()["id"]
            data_model["saison_id"] = saison_id
    result_send_data = requests.post("http://127.0.0.1:15080/video", data=json.dumps(data_model, sort_keys=True, indent=4))
    debug.info("result *********** : " + str(result_send_data) + " " + result_send_data.text)
    return True
def build(self, name, optionnal=False, actions=[], package_name=None):
    """Dispatch a build request.

    *name* selects what to operate on: the pseudo-targets "all", "clean",
    "dump", "gcov", "dependency[:RULES]", or a module name.  For a module
    name, each entry of *actions* ("build", "clean", "dump", "gcov[:opts]",
    "install", "uninstall", "run[...]", "log") is executed in order.

    :param name: target or module selector (must not contain '?' or '@').
    :param optionnal: when True, a missing module yields a default
                      [HeritageList(), False] result instead of an error.
    :param actions: action list; "" / [] default to ["build"], a bare
                    string is wrapped in a list.
                    NOTE(review): mutable default argument -- it is only
                    rebound, never mutated, so currently harmless, but a
                    None default would be safer.
    :param package_name: forwarded to module.build().
    :return: the single action's result when exactly one action ran,
             otherwise None.
    """
    if len(name.split("?")) != 1\
       or len(name.split("@")) != 1:
        debug.error("need update")
    if actions == "":
        actions = ["build"]
    if actions == []:
        actions = ["build"]
    if type(actions) == str:
        actions = [actions]
    if name == "gcov":
        debug.info("gcov all")
        debug.error("must set the gcov parsing on a specific library or binary ==> not supported now for all")
    if name == "dump":
        debug.info("dump all")
        self.load_all()
        for mod in self.module_list:
            mod.display()
        return
    if name[:10] == "dependency":
        # NOTE(review): "dependencyXYZ" without a ':' would make
        # name.split(":")[1] raise IndexError -- confirm inputs.
        if len(name) > 10:
            rules = name.split(":")[1]
        else:
            rules = "LBDPK"
        # L for library
        # B for binary
        # D for Data
        # P for prebuild
        # K for package
        debug.print_element("dot", "", "---", "dependency.dot")
        self.load_all()
        tmp_file = open("dependency.dot", 'w')
        tmp_file.write('digraph G {\n')
        tmp_file.write('    rankdir=\"LR\";\n')
        for mod in self.module_list:
            mod.dependency_generate(self, tmp_file, 1, rules)
        # TODO : do it better ==> system library hook (do a oad of all avillable system library)
        tmp_file.write('    node [\n');
        tmp_file.write('        shape=square;\n');
        tmp_file.write('        style=filled;\n');
        tmp_file.write('        color=gray;\n');
        tmp_file.write('        ];\n');
        # TODO : End hook
        for mod in self.module_list:
            mod.dependency_generate(self, tmp_file, 2, rules)
        tmp_file.write('}\n')
        tmp_file.flush()
        tmp_file.close()
        debug.print_element("dot", "", "---", "dependency.dot")
        return
    if name == "all":
        debug.info("build all")
        self.load_all()
        for mod in self.module_list:
            # Android builds only packages; other targets build binaries too
            if self._name == "Android":
                if mod.get_type() == "PACKAGE":
                    mod.build(self, package_name)
            else:
                if mod.get_type() == "BINARY" \
                   or mod.get_type() == "PACKAGE":
                    mod.build(self, package_name)
    elif name == "clean":
        debug.info("clean all")
        self.load_all()
        for mod in self.module_list:
            mod.clean(self)
    else:
        # module-specific request: run every action in order
        module_name = name
        action_list = actions
        for action_name in action_list:
            debug.verbose("requested : " + module_name + "?" + action_name + " [START]")
            ret = None;
            if action_name == "install":
                try:
                    self.install_package(module_name)
                except AttributeError:
                    debug.error("target have no 'install_package' instruction")
            elif action_name == "uninstall":
                try:
                    self.un_install_package(module_name)
                except AttributeError:
                    debug.error("target have no 'un_install_package' instruction")
            elif action_name[:3] == "run":
                """
                if mod.get_type() != "BINARY" \
                   and mod.get_type() != "PACKAGE":
                    debug.error("Can not run other than 'BINARY' ... pakage='" + mod.get_type() + "' for module='" + module_name + "'")
                """
                # syntax: run[%binary][:opt1[:opt2...]]; "\:" escapes a
                # ':' inside an option value
                bin_name = None
                if len(action_name) > 3:
                    if action_name[3] == '%':
                        bin_name = ""
                        for elem in action_name[4:]:
                            if elem == ":":
                                break;
                            bin_name += elem
                    # we have option:
                    action_name2 = action_name.replace("\:", "1234COLUMN4321")
                    option_list = action_name2.split(":")
                    if len(option_list) == 0:
                        if bin_name != None:
                            debug.warning("action 'run' wrong options options ... : '" + action_name + "' might be separate with ':'")
                        option_list = []
                    else:
                        option_list_tmp = option_list[1:]
                        option_list = []
                        for elem in option_list_tmp:
                            option_list.append(elem.replace("1234COLUMN4321", ":"))
                else:
                    option_list = []
                #try:
                self.run(module_name, option_list, bin_name)
                #except AttributeError:
                #    debug.error("target have no 'run' instruction")
            elif action_name == "log":
                try:
                    self.show_log(module_name)
                except AttributeError:
                    debug.error("target have no 'show_log' instruction")
            else:
                # module-level actions: dump / clean / gcov / build
                present = self.load_if_needed(module_name, optionnal=optionnal)
                if present == False \
                   and optionnal == True:
                    ret = [heritage.HeritageList(), False]
                else:
                    for mod in self.module_list:
                        debug.verbose("compare " + mod.get_name() + " == " + module_name)
                        if mod.get_name() == module_name:
                            if action_name[:4] == "dump":
                                debug.info("dump module '" + module_name + "'")
                                if len(action_name) > 4:
                                    debug.warning("action 'dump' does not support options ... : '" + action_name + "'")
                                ret = mod.display()
                                break
                            elif action_name[:5] == "clean":
                                debug.info("clean module '" + module_name + "'")
                                if len(action_name) > 5:
                                    debug.warning("action 'clean' does not support options ... : '" + action_name + "'")
                                ret = mod.clean(self)
                                break
                            elif action_name[:4] == "gcov":
                                debug.debug("gcov on module '" + module_name + "'")
                                if len(action_name) > 4:
                                    # we have option:
                                    option_list = action_name.split(":")
                                    if len(option_list) == 0:
                                        debug.warning("action 'gcov' wrong options options ... : '" + action_name + "' might be separate with ':'")
                                        option_list = []
                                    else:
                                        option_list = option_list[1:]
                                else:
                                    option_list = []
                                if "output" in option_list:
                                    ret = mod.gcov(self, generate_output=True)
                                else:
                                    ret = mod.gcov(self, generate_output=False)
                                break
                            elif action_name[:5] == "build":
                                if len(action_name) > 5:
                                    debug.warning("action 'build' does not support options ... : '" + action_name + "'")
                                debug.debug("build module '" + module_name + "'")
                                if optionnal == True:
                                    ret = [mod.build(self, package_name), True]
                                else:
                                    ret = mod.build(self, package_name)
                                break
                    # optional module not found: fall back to a default
                    # result and stop processing further actions
                    if optionnal == True \
                       and ret == None:
                        ret = [heritage.HeritageList(), False]
                        break
                    if ret == None:
                        debug.error("not know module name : '" + module_name + "' to '" + action_name + "' it")
            debug.verbose("requested : " + module_name + "?" + action_name + " [STOP]")
        if len(action_list) == 1:
            return ret
def parse(self, start_position_parsing=1):
    """Parse ``sys.argv`` against the declared properties.

    Handles "--long[=value]", "-s[value]" and positional arguments; an
    argument listed in ``self._list_element_stop`` ends the parse.  Exits
    the process (exit(-1)) on a missing or invalid sub-parameter, and
    aborts via debug.error on an unknown "--" option.

    :param start_position_parsing: index of the first sys.argv entry to read.
    :return: list of ArgElement (option name, optional value); positional
             and post-stop arguments are stored with an empty option name.
    """
    list_argument = [] # composed of list element
    # set when an option consumed the following argv entry as its value
    not_parse_next_element = False
    for iii in range(start_position_parsing, len(sys.argv)):
        self._last_element_parsed = iii
        # special case of parameter in some elements
        if not_parse_next_element == True:
            not_parse_next_element = False
            continue
        debug.verbose("parse [" + str(iii) + "]=" + sys.argv[iii])
        argument = sys.argv[iii]
        # check if we get a stop parsing element:
        if argument in self._list_element_stop:
            debug.warning("stop at position: " + str(iii))
            list_argument.append(ArgElement("", argument))
            break
        # split "name=value" (value may itself contain '=')
        optionList = argument.split("=")
        debug.verbose(str(optionList))
        if type(optionList) == type(str()):
            option = optionList
        else:
            option = optionList[0]
        optionParam = argument[len(option) + 1:]
        debug.verbose(option)
        argument_found = False
        if option[:2] == "--":
            # big argument
            for prop in self.list_properties:
                if prop.is_parsable() == False:
                    continue
                if prop.get_option_big() == "":
                    continue
                if prop.get_option_big() == option[2:]:
                    # find it
                    debug.verbose("find argument 2 : " + option[2:])
                    if prop.need_parameters() == True:
                        # value may be glued to the option name
                        internalSub = option[2 + len(prop.get_option_big()):]
                        if len(internalSub) != 0:
                            if len(optionParam) != 0:
                                # wrong argument ...
                                debug.warning("maybe wrong argument for : '" + prop.get_option_big() + "' cmdLine='" + argument + "'")
                                prop.display()
                                continue
                            optionParam = internalSub
                        if len(optionParam) == 0:
                            #Get the next parameters
                            if len(sys.argv) > iii + 1:
                                optionParam = sys.argv[iii + 1]
                                not_parse_next_element = True
                            else:
                                # missing arguments
                                debug.warning("parsing argument error : '" + prop.get_option_big() + "' Missing : subParameters ... cmdLine='" + argument + "'")
                                prop.display()
                                exit(-1)
                        if prop.check_availlable(optionParam) == False:
                            debug.warning("argument error : '" + prop.get_option_big() + "' SubParameters not availlable ... cmdLine='" + argument + "' option='" + optionParam + "'")
                            prop.display()
                            exit(-1)
                        list_argument.append(ArgElement(prop.get_option_big(), optionParam))
                        argument_found = True
                    else:
                        if len(optionParam) != 0:
                            debug.warning("parsing argument error : '" + prop.get_option_big() + "' need no subParameters : '" + optionParam + "' cmdLine='" + argument + "'")
                            prop.display()
                        list_argument.append(ArgElement(prop.get_option_big()))
                        argument_found = True
                    break
            if argument_found == False:
                debug.error("UNKNOW argument : '" + argument + "'")
        elif option[:1] == "-":
            # small argument
            # NOTE(review): this branch mirrors the "--" branch above
            # (candidate for factorization) and its messages report
            # get_option_big() even though a small option matched.
            for prop in self.list_properties:
                if prop.is_parsable() == False:
                    continue
                if prop.get_option_small() == "":
                    continue
                if prop.get_option_small() == option[1:1 + len(prop.get_option_small())]:
                    # find it
                    debug.verbose("find argument 1 : " + option[1:1 + len(prop.get_option_small())])
                    if prop.need_parameters() == True:
                        # value may be glued to the option name
                        internalSub = option[1 + len(prop.get_option_small()):]
                        if len(internalSub) != 0:
                            if len(optionParam) != 0:
                                # wrong argument ...
                                debug.warning("maybe wrong argument for : '" + prop.get_option_big() + "' cmdLine='" + argument + "'")
                                prop.display()
                                continue
                            optionParam = internalSub
                        if len(optionParam) == 0:
                            #Get the next parameters
                            if len(sys.argv) > iii + 1:
                                optionParam = sys.argv[iii + 1]
                                not_parse_next_element = True
                            else:
                                # missing arguments
                                debug.warning("parsing argument error : '" + prop.get_option_big() + "' Missing : subParameters cmdLine='" + argument + "'")
                                prop.display()
                                exit(-1)
                        if prop.check_availlable(optionParam) == False:
                            debug.warning("argument error : '" + prop.get_option_big() + "' SubParameters not availlable ... cmdLine='" + argument + "' option='" + optionParam + "'")
                            prop.display()
                            exit(-1)
                        list_argument.append(ArgElement(prop.get_option_big(), optionParam))
                        argument_found = True
                    else:
                        if len(optionParam) != 0:
                            debug.warning("parsing argument error : '" + prop.get_option_big() + "' need no subParameters : '" + optionParam + "' cmdLine='" + argument + "'")
                            prop.display()
                        list_argument.append(ArgElement(prop.get_option_big()))
                        argument_found = True
                    break
        if argument_found == False:
            # small argument
            # positional catch: feed the argument to the first unnamed
            # parsable-excluded property that expects a parameter
            for prop in self.list_properties:
                if prop.is_parsable() == True \
                   or prop.get_option_big() == "":
                    continue
                if prop.need_parameters() == True:
                    list_argument.append(ArgElement(prop.get_option_big(), argument))
                    argument_found = True
                    break
        if argument_found == False:
            #unknow element ... ==> just add in the list ...
            debug.verbose("unknow argument : " + argument)
            list_argument.append(ArgElement("", argument))
    # final pass: every non-optional positional property must have a value
    for prop in self.list_properties:
        if prop.is_parsable() == True \
           or prop.get_option_big() == "":
            continue
        if prop.need_parameters() == True \
           and prop.optionnal == False:
            debug.error("Missing argument:" + prop.get_option_big())
    #for argument in list_argument:
    #    argument.display()
    #exit(0)
    return list_argument
def remove_path_and_sub_path(path):
    """Delete *path* and everything below it; no-op when it is not a directory.

    :param path: directory to remove recursively.
    """
    if not os.path.isdir(path):
        # nothing to remove (missing path, or a plain file)
        return
    debug.verbose("remove path : '" + path + "'")
    shutil.rmtree(path)
def __init__(self, target): system.System.__init__(self) # create some HELP: self.set_help("PULSE : The Linux PulseAudio\n Can be install with the package:\n - libpulse-dev") # check if the library exist: if not os.path.isfile("/usr/include/pulse/pulseaudio.h"): # we did not find the library reqiested (just return) (automaticly set at false) return; dst_data = tools.file_read_data("/usr/include/pulse/version.h") lines = dst_data.split("\n") patern = "#define pa_get_headers_version() (\"" # " #corect edn error parsing version = None for line in lines: if line[:len(patern)] == patern: #Find the version line offset = len(patern) version = "" while offset < len(line) \ and line[offset] != '.': version += line[offset] offset += 1 offset += 1 version2 = "" while offset < len(line) \ and line[offset] != '.': version2 += line[offset] offset += 1 debug.verbose("detect version '" + version + "'") break; if version == None: debug.warning("Can not det version of Pulseaudio ... ==> remove it") return self.set_version([int(version),int(version2)]) self.set_valid(True) self.add_depend([ 'c' ]) if env.get_isolate_system() == False: self.add_flag("link-lib", [ "pulse-simple", "pulse" ]) else: # todo : create a searcher of the presence of the library: """ self.add_flag("link-lib", [ "-l/lib/pulseaudio/libpulsecommon-" + version + ".0.so" ]) """ self.add_flag("link-lib", [ "pulsecommon-" + version + ".0", "pulse-mainloop-glib", "pulse-simple", "pulse" ]) self.add_flag("link", "-L/usr/lib/pulseaudio") self.add_flag("link", "-Wl,-R/usr/lib/pulseaudio") self.add_header_file([ "/usr/include/pulse/*", ], destination_path="pulse", recursive=True)
def _copy_module_binaries(self, path_package_bin, module_name, static, copy_list):
    """Copy one module's binary and its '.js' companion into the package bin dir (best-effort).

    :param path_package_bin: destination directory inside the package.
    :param module_name: module whose build artifacts are copied.
    :param static: forwarded to get_build_file_bin().
    :param copy_list: accumulator dict for tools.copy_list / clean_directory.
    """
    # main binary
    path_src = self.get_build_file_bin(module_name, static)
    if os.path.exists(path_src) == True:
        try:
            path_dst = os.path.join(path_package_bin, module_name + self.suffix_binary)
            debug.verbose("path_dst: " + str(path_dst))
            tools.copy_file(path_src, path_dst, in_list=copy_list)
        except Exception:
            # best-effort copy: keep the original silent-skip behavior,
            # but no longer swallow KeyboardInterrupt/SystemExit
            debug.extreme_verbose("can not find : " + path_src)
    # companion file: same path with the last 4 chars replaced by "js"
    # (assumes a 4-char suffix like ".xxx" -- TODO confirm for all targets)
    path_src = self.get_build_file_bin(module_name, static)
    path_src = path_src[:len(path_src)-4] + "js"
    if os.path.exists(path_src) == True:
        try:
            path_dst = os.path.join(path_package_bin, module_name + self.suffix_binary2)
            debug.verbose("path_dst: " + str(path_dst))
            tools.copy_file(path_src, path_dst, in_list=copy_list)
        except Exception:
            debug.extreme_verbose("can not find : " + path_src)

def make_package_binary_bin(self, path_package, pkg_name, base_pkg_path, heritage_list, static):
    """Populate the package's bin directory with the module and inherited binaries.

    :param path_package: root of the package being assembled.
    :param pkg_name: name of the main module.
    :param base_pkg_path: unused here (kept for interface compatibility).
    :param heritage_list: dependency heritage; each entry's binary is copied too.
    :param static: whether static build artifacts are used.
    :return: True when any file was copied or removed, else False.
    """
    debug.debug("make_package_binary_bin [START]")
    copy_list = {}
    # create basic output path
    path_package_bin = os.path.join(path_package, self.pkg_path_bin)
    tools.create_directory_of_file(path_package_bin)
    # local module binary (the copy logic is shared with the heritage loop
    # below -- it used to be duplicated four times inline)
    self._copy_module_binaries(path_package_bin, pkg_name, static, copy_list)
    # heritage binaries
    debug.debug("heritage for " + str(pkg_name) + ":")
    for heritage in heritage_list.list_heritage:
        debug.debug("sub elements: " + str(heritage.name))
        self._copy_module_binaries(path_package_bin, heritage.name, static, copy_list)
    # real copy files
    ret_copy = tools.copy_list(copy_list)
    ret_remove = False
    if self.pkg_path_bin != "":
        # remove unneded files (NOT folder ...)
        ret_remove = tools.clean_directory(path_package_bin, copy_list)
    debug.debug("make_package_binary_bin [STOP]")
    return ret_copy or ret_remove