def write_libraries(config):
    """Emit the <Libraries> XML section from files in the libraries folder."""
    INFO("Libraries Data: Processing...")
    folder = os.path.join(config["source"], constants.LIBRARIES_FOLDER)
    if not os.path.exists(folder):
        CRITICAL("Can't find: {}".format(folder))
        emergency_exit()
    write_xml("Libraries", indent=2)
    # Reserved (service) names are excluded from the listing.
    candidates = sorted(set(os.listdir(folder)) - set(constants.RESERVED_NAMES))
    for filename in candidates:
        full_path = os.path.join(folder, filename)
        if not os.path.isfile(full_path):
            continue
        DEBUG("Open file: %s", full_path)
        with open_file(full_path) as handle:
            write_xml(
                tagname="Library",
                attrs={"Name": filename.split(".", 1)[0]},
                indent=4,
                data=handle.read(),
                close=True
            )
    write_xml("Libraries", indent=2, closing=True)
    INFO("Libraries Data: Done!")
def update_pages_resources(self):
    """Write, for every parsed page, the JSON list of resources it uses."""
    if not (PARSER.config["parse_all"] or PARSER.config["parse"]["pages"]):
        return
    INFO("Parsing: used resources for every page")
    PARSER.append_to_current_path(constants.PAGES_FOLDER)
    known_guids = set(RESOURCES.keys())
    for page in PARSER.pages.values():
        # Keep only resources that are both known and referenced by the page.
        used = known_guids & set(page["guids"])
        PARSER.append_to_current_path(page["name"])
        names = [RESOURCES[guid] for guid in used]
        PARSER.write_json_file(
            constants.RESOURCES_FILE,
            sorted(names, key=lambda name: name.lower()))
        PARSER.pop_from_current_path()
    PARSER.pop_from_current_path()
    INFO("Completed: used resources for every page")
def update_pages_libraries(self):
    """Write, for every parsed page, the JSON list of libraries it uses."""
    if not (PARSER.config["parse_all"] or PARSER.config["parse"]["pages"]):
        return
    INFO("Parsing: used libraries for every page")
    PARSER.append_to_current_path(constants.PAGES_FOLDER)
    known = set(LIBRARIES)
    for page in PARSER.pages.values():
        # Keep only libraries that are both known and referenced by the page.
        used = known & set(page["libraries"])
        PARSER.append_to_current_path(page["name"])
        PARSER.write_json_file(
            constants.LIBRARIES_FILE,
            sorted(used, key=lambda name: name.lower()))
        PARSER.pop_from_current_path()
    PARSER.pop_from_current_path()
    INFO("Completed: used libraries for every page")
def replace_all_guids(config):
    """Replace all guids in application

    Walks every file under the target path and substitutes each GUID
    recorded in GUIDS_TO_REPLACE with its generated replacement.
    Files whose content is unchanged by the substitution are not
    rewritten (avoids needless truncate/write and mtime churn).
    """
    INFO("Replace all GUIDs in application")
    INFO("GUIDs to replace - %s", len(GUIDS_TO_REPLACE))
    if GUIDS_TO_REPLACE:
        regexp = RE_OBJ_UUID
        # function for guids replacement
        sub_func = sub_chain_func([re_res_sub(GUIDS_TO_REPLACE)])
        for cwd, dirs, files in os.walk(config["target"]["path"]):
            if files:
                DEBUG("Replace GUIDs in directory '%s'", cwd)
            for node in sorted(files):
                node_path = os.path.join(cwd, node)
                DEBUG(" - Replace in file %s", node_path)
                with open(node_path, "rb") as src:
                    data = src.read()
                new_data = regexp.sub(sub_func, data)
                # Only rewrite when a substitution actually happened.
                if new_data != data:
                    with open(node_path, "wb") as dst:
                        dst.write(new_data)
    INFO("GUIDs successfully replaced")
def copy_databases(config):
    """Copy databases """
    if "Databases" not in config:
        INFO("No information about databases")
        return
    DEBUG("Collect databases info")
    target_path = os.path.join(config["target"]["path"], constants.DATABASES_FOLDER)
    DEBUG("Copy databases")
    sources = config["Databases"]
    # A single source entry is allowed; normalize to a tuple.
    if not isinstance(sources, (list, tuple)):
        sources = (sources, )
    for source in sources:
        files = copy_files(target_path, source, config)
        change_guids = False
        if isinstance(source, dict):
            # dict-style sources may request fresh GUIDs for copied files
            change_guids = bool(source.get("generateGUIDs", False))
        if not change_guids:
            continue
        for old_name in files:
            # Expected file name layout: "<guid>_<name>.<ext>"
            raw_name = old_name.split("_", 1)
            # NOTE(review): extension is always forced to "sqlite" in the new
            # name regardless of the original extension — confirm intended.
            res_type = "sqlite"
            res_guid = res_name = ""
            try:
                res_guid = str(UUID(raw_name[0])).lower()
            except ValueError:
                # No valid GUID prefix: generate one and keep the whole
                # stem (name without extension) as the resource name.
                res_guid = gen_guid()
                res_name = old_name.rsplit(".", 1)[0]
            else:
                res_name = raw_name[1].rsplit(".", 1)[0]
            # Record old->new GUID so replace_all_guids can patch references.
            new_guid = GUIDS_TO_REPLACE[res_guid] = gen_guid()
            new_name = "{}_{}.{}".format(new_guid, res_name, res_type)
            old_path = os.path.join(target_path, old_name)
            new_path = os.path.join(target_path, new_name)
            DEBUG("Move '%s' to '%s'", old_path, new_path)
            shutil.move(old_path, new_path)
    # copy_files(target_path, config["Databases"], config)
    INFO("Databases were copied successfully")
def spawnContainers(c, fullLowerDir):
    """Start one container per address produced by genIPlist."""
    for address in genIPlist(c.cntCommon):
        cnt = Container(address, fullLowerDir, c)
        printInfo("***")
        print(INFO("Trying to start container " + cnt.cName))
        cnt.mountOverlayFS()
        cnt.spawn()
        cnt.checkStatus(full=False)
        print(INFO("Adding fingerprint to ssh known_hosts "))
        cnt.checkSSH()
def main():
    """Main function """
    cli = argparse.ArgumentParser()
    cli.add_argument("cfg", type=argparse.FileType("rb"),
                     help="configuration file")
    cli.add_argument("target", type=str, help="target folder")
    cli.add_argument("-v", "--verbosity", action="count",
                     help="be more verbose", default=0)
    cli.add_argument("-e", "--erase", action="store_true",
                     help="erase target folder")
    cli.add_argument("-q", "--quiet", action="store_true",
                     help="no user interaction")
    args = cli.parse_args()
    # Setup logging system and show necessary messages
    level = logging.DEBUG if args.verbosity else logging.INFO
    setup_logging(level, module_name=args.verbosity > 1)
    INFO("Information logging turned on")
    DEBUG("Debug logging turned on")
    # Parsing config file
    DEBUG("Parsing config file")
    config = json_load(args.cfg, critical=True)
    INFO("Config file parsed successfully")
    config["target"] = {
        "path": args.target,
        "erase": args.erase,
        "quiet": args.quiet,
    }
    # Main process starting
    make(config)
    INFO("")
    INFO("+" * 70)
    INFO("\nPath to application:\n{}".format(config["target"]["path"]))
def make(config):
    """Call copy functions in cycle """
    steps = (
        create_basic_structure,
        copy_resources,
        copy_databases,
        copy_libraries,
        copy_security,
        copy_app_actions,
        copy_pages,
        create_application_info_file,
        replace_all_guids,
    )
    for step in steps:
        # Visual separator between stages in the log.
        INFO("")
        INFO("+" * 70)
        INFO("")
        step(config)
def parse_app(config):
    """VDOM Application XML parser initialization and start parsing process """
    global PARSER
    DEBUG("Initialize VDOM Application XML parser")
    # Keep a module-level handle so other handlers can reach the parser.
    parser = Parser()
    PARSER = parser
    INFO("Parsing started...")
    parser.parse(config["source"], config["target"]["path"], config)
    INFO("Completed!")
def write_resources(config):
    """Emit the <Resources> XML section; file payloads are base64-encoded."""
    INFO("Resources Data: Processing...")
    folder = os.path.join(config["source"], constants.RESOURCES_FOLDER)
    if not os.path.exists(folder):
        CRITICAL("Can't find: {}".format(folder))
        emergency_exit()
    write_xml("Resources", indent=2)
    entries = sorted(set(os.listdir(folder)) - set(constants.RESERVED_NAMES))
    for res_name in entries:
        res_path = os.path.join(folder, res_name)
        if not os.path.isfile(res_path):
            continue
        # Expected file name layout: "<guid>_<type>_<name>"
        parts = res_name.split("_", 2)
        try:
            res_guid = UUID(parts[0])
        except ValueError:
            # No GUID prefix: invent one and derive the type from the
            # extension; the full file name stays as the resource name.
            res_guid = gen_guid()
            stem_ext = res_name.rsplit(".", 1)
            res_type = stem_ext[1] if len(stem_ext) == 2 else "res"
        else:
            res_type = parts[1]
            res_name = parts[2]
        attrs = {"ID": res_guid, "Name": res_name, "Type": res_type}
        DEBUG("Open file: %s", res_path)
        with open_file(res_path) as res_f:
            write_xml(
                tagname="Resource",
                attrs=attrs,
                indent=4,
                data=base64.b64encode(res_f.read()),
                close=True
            )
    write_xml("Resources", indent=2, closing=True)
    INFO("Resources Data: Done!")
def create_basic_structure(config):
    """Create basic folders """
    DEBUG("Creating basic structure")
    root = config["target"]["path"] = create_folder(**config["target"])
    if config["parse_all"]:
        for folder in constants.BASE_FOLDERS:
            create_folder(os.path.join(root, folder))
    else:
        # Create only the folders whose section is selected for parsing.
        section_folders = (
            ("databases", constants.DATABASES_FOLDER),
            ("libraries", constants.LIBRARIES_FOLDER),
            ("pages", constants.PAGES_FOLDER),
            ("resources", constants.RESOURCES_FOLDER),
            ("security", constants.SECURITY_FOLDER),
            ("app_actions", constants.APP_ACTIONS_FOLDER),
        )
        for section, folder in section_folders:
            if config["parse"][section]:
                create_folder(os.path.join(root, folder))
    INFO("Basic structure successfully created")
def end(self):
    """Finish the Pages section: pop the path level and drop scratch state."""
    super(PagesTagHandler, self).end()
    PARSER.pop_from_current_path()
    INFO("Completed: Pages")
    # "current" is a scratch entry used while parsing; discard it if present.
    PARSER.pages.pop("current", None)
def write_databases(config):
    """Emit the <Databases> XML section; file payloads are base64-encoded."""
    INFO("Databases Data: Processing...")
    folder = os.path.join(config["source"], constants.DATABASES_FOLDER)
    if not os.path.exists(folder):
        # Missing databases folder is tolerated: section is simply skipped.
        DEBUG("Can't find: {}".format(folder))
        return
    write_xml("Databases", indent=2)
    entries = sorted(set(os.listdir(folder)) - set(constants.RESERVED_NAMES))
    for db_name in entries:
        db_path = os.path.join(folder, db_name)
        if not os.path.isfile(db_path):
            continue
        # Expected file name layout: "<guid>_<name>.<type>"
        head_tail = db_name.split("_", 1)
        try:
            db_guid = UUID(head_tail[0])
        except ValueError:
            db_guid = gen_guid()
        stem_ext = head_tail[-1].split(".", 1)
        db_name = stem_ext[0]
        db_type = stem_ext[1] if len(stem_ext) == 2 else "sqlite"
        attrs = {"ID": db_guid, "Name": db_name, "Type": db_type}
        DEBUG("Open file: %s", db_path)
        with open_file(db_path) as db_f:
            write_xml(
                tagname="Database",
                attrs=attrs,
                indent=4,
                data=base64.b64encode(db_f.read()),
                close=True
            )
    write_xml("Databases", indent=2, closing=True)
    INFO("Databases Data: Done!")
def write_app_info(config):
    """Emit the <Information> XML section from the application info JSON."""
    INFO("Application Information Data: Processing...")
    info_path = os.path.join(config["source"], constants.INFO_FILE)
    with open_file(info_path) as info_file:
        info_json = json_load(info_file, critical=True)
    write_xml("Information", indent=2)
    # One child tag per key of the info file.
    for tagname, value in info_json.items():
        write_xml(tagname, data=value, close=True, indent=4)
    write_xml("Information", indent=2, closing=True)
    INFO("Application Information Data: Done!")
def end(self):
    """Finish the E2VDOM section: resolve page action ids to parsed actions."""
    super(E2vdomTagHandler, self).end()
    for page in PARSER.pages.values():
        actions = page["actions"]
        # Replace each action id with its parsed action ('' when unknown).
        for act_id in actions:
            actions[act_id] = self.actions.get(act_id, '')
    self.save()
    INFO("Completed: E2VDOM")
def copy_libraries(config):
    """Copy libraries """
    if "Libraries" not in config:
        INFO("No information about libraries")
        return
    DEBUG("Collect libraries info")
    destination = os.path.join(config["target"]["path"],
                               constants.LIBRARIES_FOLDER)
    DEBUG("Copy libraries")
    copy_files(destination, config["Libraries"], config)
    INFO("Libraries were copied successfully")
def checkStatus(self, full=True):
    """Run container health checks; `full` adds FS/leader/network checks."""
    print(INFO("Checking container {0}, ip {1}/{2}".format(
        self.cName, self.ip, self.netPrefix)))
    self.ctlStatus()
    if not full:
        return
    self.checkOverlayFS()
    self.checkLeader()
    self.checkPing()
    self.checkSSH()
def copy_security(config):
    """Copy security section """
    if "Security" not in config:
        INFO("No information about security settings")
        return
    DEBUG("Collect security info")
    destination = os.path.join(config["target"]["path"],
                               constants.SECURITY_FOLDER)
    DEBUG("Copy security settings")
    copy_files(destination, config["Security"], config)
    INFO("Security settings were copied successfully")
def end(self):
    """Finish the Information section: clean collected data and pick the
    script file extension from the application's scripting language."""
    global ACTION_EXT
    # remove unnecessary symbols from data and encode it
    for key, value in self.data.items():
        self.data[key] = encode(clean_data("".join(value)))
    # detect application programming language
    # Default to ".py": the fallback must be an extension, not the
    # language name (the original defaulted to the string "python").
    ACTION_EXT = {
        "python": ".py",
        "vscript": ".vb"
    }.get(self.data["ScriptingLanguage"].lower(), ".py")
    INFO("Scripts extension will be '*%s'", ACTION_EXT)
    INFO("Completed: Application Information")
    self.save()
    super(InformationTagHandler, self).end()
def copy_app_actions(config):
    """Copy application actions """
    if "Actions" not in config:
        INFO("No information about application actions")
        return
    DEBUG("Collect application actions info")
    destination = os.path.join(config["target"]["path"],
                               constants.APP_ACTIONS_FOLDER)
    DEBUG("Copy application actions")
    copy_files(destination, config["Actions"], config)
    INFO("Application actions were copied successfully")
def create_basic_structure(config):
    """Create basic folders """
    DEBUG("Creating basic structure")
    root = config["target"]["path"] = create_folder(**config["target"])
    # One subfolder per application section.
    for name in constants.BASE_FOLDERS:
        create_folder(os.path.join(root, name))
    INFO("Basic structure successfully created")
def write_pages(config):
    """Emit the <Objects> section (all pages) plus application <Actions>."""
    INFO("Pages Data: Processing...")
    pages_path = os.path.join(config["source"], constants.PAGES_FOLDER)
    if not os.path.exists(pages_path):
        CRITICAL("Can't find: {}".format(pages_path))
        emergency_exit()
    write_xml("Objects", indent=2)
    for entry in sorted(os.listdir(pages_path)):
        walk(pages_path, entry, indent=4)
    write_xml("Objects", indent=2, closing=True)
    actions_folder = os.path.join(config["source"],
                                  constants.APP_ACTIONS_FOLDER)
    write_actions(actions_folder, 2)
    INFO("Pages Data: Done!")
def write_structure(config):
    """Emit the <Structure> XML section from the structure JSON file."""
    INFO("Structure Data: Processing...")
    struct_path = os.path.join(config["source"], constants.STRUCT_FILE)
    if not os.path.exists(struct_path):
        # A missing structure file is tolerated: write an empty tag.
        ERROR("Can't find: {}".format(struct_path))
        write_xml("Structure", indent=2, close=True)
        return
    write_xml("Structure", indent=2)
    with open_file(struct_path) as struct_file:
        struct_json = json_load(struct_file, critical=True)
    for obj in struct_json:
        write_xml("Object", attrs=obj, data="", close=True, indent=4)
    write_xml("Structure", indent=2, closing=True)
    INFO("Structure Data: Done!")
def rmWorkingDirs(self):
    """Remove the content of every configured path that lives under the
    parent working directory; refuse to touch paths outside it.

    Returns a (success, message, step) tuple.
    """
    step = "Removing content of working directories"
    wd = self.c.workingDirectory
    for key in self.c.getKeys():
        path = getattr(self.c, key)
        if key == "workingDirectory":
            continue
        if wd in path:
            if os.path.isdir(path):
                msg, ec = exec_command("sudo rm -rf {0}/*".format(path))
                print(OK(path + " was cleaned up"))
        else:
            # Fixed: .format() must be applied to the message string itself,
            # not to the value INFO() returns; also fixed typos.
            print(INFO("{} is not under parent working directory,"
                       " refusing to remove its content".format(path)))
    return True, "", step
def checkOverlayFS(self):
    """Verify every configured path appears in the container's mount list."""
    step = "Checking overlayFS mount"
    msg, ec = exec_command("mount | grep " + self.cName)
    # localStore / workingDirectory are not part of the overlay mount.
    skipped = ('localStore', 'workingDirectory')
    missing = False
    for key in self.pConfig.getKeys():
        if key in skipped:
            continue
        path = getattr(self.pConfig, key)
        if path not in msg:
            print(INFO("Not in OFS mount: " + path))
            missing = True
            break
    print(FAIL(step) if missing else OK(step))
def fetch(self):
    """Download the image into the local store.

    Returns a (success, message, step) tuple; `message` carries the
    exception text on failure.
    """
    res, msg = False, True
    # Build the step description before the try block so it is always
    # defined at return time (originally it was set after
    # backupExistingFile(), leaving it unbound if that call raised).
    step = "Downloading image {0} (size {1}M) " \
           "to {2} (free space {3}M)".format(self.imageName, self.size,
                                             self.lStore, self.freeSpace)
    try:
        backupExistingFile(self.dest)
        print(INFO("URL: " + self.src))
        urlretrieve(self.src, self.dest)
        res = True
    except Exception as e:
        # str(e) instead of e.message: the .message attribute does not
        # exist on Python 3 exceptions.
        msg = str(e)
    return res, msg, step
def write_actions(path, indent):
    """Emit the <Actions> XML section from the action scripts in `path`."""
    map_path = os.path.join(path, constants.MAP_FILE)
    if not os.path.exists(map_path):
        # Without the map file we still emit an (empty) Actions section.
        INFO("Can't find: %s; skipping Actions", map_path)
        write_xml("Actions", indent=indent)
        write_xml("Actions", indent=indent, closing=True)
        return
    with open_file(map_path) as map_file:
        actions_map = json_load(map_file, critical=True)
    write_xml("Actions", indent=indent)
    for action_name in sorted(os.listdir(path)):
        action_path = os.path.join(path, action_name)
        skip = (not os.path.isfile(action_path)
                or action_name in constants.RESERVED_NAMES)
        if skip:
            continue
        attrs = actions_map.get(action_name, None)
        if not attrs:
            # Action missing from the map: synthesize default attributes.
            attrs = {
                "Top": "",
                "State": "",
                "Left": "",
                "ID": str(gen_guid()),
                "Name": action_name.split(".", 1)[0],
            }
        with open_file(action_path) as action_f:
            write_xml(
                tagname="Action",
                attrs=attrs,
                indent=indent + 2,
                data=action_f.read(),
                close=True,
                force_cdata=True
            )
    write_xml("Actions", indent=indent, closing=True)
def create_application_info_file(config):
    """Create __info__.json in root directory """
    DEBUG("Collect application info")
    app_info = config.get("ApplicationInfo", {})
    # Load existing config file or create empty
    if "BaseConfigFile" in app_info:
        with open_file(app_info.pop("BaseConfigFile"), config) as hdlr:
            info = json_load(hdlr)
    else:
        info = {
            "ID": gen_guid(),
            "Name": "Application",
            "Description": "",
            "Owner": "-",
            "Active": "1",
            "Serverversion": "",
            "ScriptingLanguage": "python",
            "Icon": "",
        }
    # Values from the build config override the base file.
    info.update(app_info)
    # Generate a new GUID if one is not already present.
    if not info.get("ID", ""):
        info["ID"] = gen_guid()
    # Write data to file
    path = os.path.join(config["target"]["path"], constants.INFO_FILE)
    DEBUG("Writing application info to '%s'", path)
    with fopen(path, "wb") as hdlr:
        json_dump(info, hdlr)
    INFO("Application info successfully written to '%s'", path)
def main():
    """Main function """
    cli = argparse.ArgumentParser()
    cli.add_argument("source", type=str, help="aplication source folder")
    cli.add_argument("target", type=str, help="target XML file")
    cli.add_argument("-v", "--verbosity", action="count",
                     help="be more verbose", default=0)
    args = cli.parse_args()
    # Setup logging system and show necessary messages
    level = logging.DEBUG if args.verbosity else logging.INFO
    setup_logging(level, module_name=args.verbosity > 1)
    INFO("")
    INFO("Information logging turned on")
    DEBUG("Debug logging turned on")
    INFO("")
    INFO(BLOCK_END)
    INFO("")
    config = {
        "source": args.source,
        "target": {"path": args.target},
    }
    # Main process starting
    build(config)
    INFO("\nPath to application XML:\n{}".format(config["target"]["path"]))
def end(self):
    """Finish the Structure section."""
    super(StructureTagHandler, self).end()
    # NOTE(review): save() is commented out here, unlike the other tag
    # handlers which call self.save() — confirm this is intentional.
    # self.save()
    INFO("Completed: Structure")