def replace_all_guids(config):
    """Replace all GUIDs in application
    """
    INFO("Replace all GUIDs in application")
    INFO("GUIDs to replace - %s", len(GUIDS_TO_REPLACE))
    if GUIDS_TO_REPLACE:
        regexp = RE_OBJ_UUID
        # function for guids replacement
        sub_func = sub_chain_func([re_res_sub(GUIDS_TO_REPLACE)])

        for cwd, dirs, files in os.walk(config["target"]["path"]):
            if files:
                DEBUG("Replace GUIDs in directory '%s'", cwd)

            for node in sorted(files):
                node_path = os.path.join(cwd, node)
                DEBUG(" - Replace in file %s", node_path)
                with open(node_path, "rb") as src:
                    data = src.read()

                with open(node_path, "wb") as dst:
                    dst.write(regexp.sub(sub_func, data))

    INFO("GUIDs successfully replaced")
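# A minimal sketch of the substitution contract used above, assuming RE_OBJ_UUID
# matches a bare GUID and re_res_sub() builds a re.sub() callback around the
# GUIDS_TO_REPLACE mapping.  The real helpers live elsewhere in the project;
# this only illustrates what they are expected to do.  Note that copy_pages()
# also registers an underscore-separated variant of every GUID, so the real
# pattern presumably accepts both "-" and "_" separated forms.
#
#   RE_OBJ_UUID = re.compile(
#       r"[0-9a-fA-F]{8}[-_][0-9a-fA-F]{4}[-_][0-9a-fA-F]{4}"
#       r"[-_][0-9a-fA-F]{4}[-_][0-9a-fA-F]{12}")
#
#   def re_res_sub(mapping):
#       def _sub(match):
#           return mapping.get(match.group(0).lower(), match.group(0))
#       return _sub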
def copy_databases(config):
    """Copy databases
    """
    if "Databases" not in config:
        INFO("No information about databases")
        return

    DEBUG("Collect databases info")
    target_path = os.path.join(config["target"]["path"],
                               constants.DATABASES_FOLDER)

    DEBUG("Copy databases")
    sources = config["Databases"]
    if not isinstance(sources, (list, tuple)):
        sources = (sources, )

    for source in sources:
        files = copy_files(target_path, source, config)

        change_guids = False
        if isinstance(source, dict):
            change_guids = bool(source.get("generateGUIDs", False))

        if not change_guids:
            continue

        for old_name in files:
            raw_name = old_name.split("_", 1)
            res_type = "sqlite"
            res_guid = res_name = ""
            try:
                res_guid = str(UUID(raw_name[0])).lower()
            except ValueError:
                res_guid = gen_guid()
                res_name = old_name.rsplit(".", 1)[0]
            else:
                res_name = raw_name[1].rsplit(".", 1)[0]

            new_guid = GUIDS_TO_REPLACE[res_guid] = gen_guid()
            new_name = "{}_{}.{}".format(new_guid, res_name, res_type)

            old_path = os.path.join(target_path, old_name)
            new_path = os.path.join(target_path, new_name)
            DEBUG("Move '%s' to '%s'", old_path, new_path)
            shutil.move(old_path, new_path)

    # copy_files(target_path, config["Databases"], config)
    INFO("Databases were copied successfully")
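# A hedged example of the "Databases" section copy_databases() consumes once
# the JSON configuration has been loaded.  Only "path" and "generateGUIDs" are
# keys actually read above; the paths themselves are hypothetical.
#
#   "Databases": [
#       "./sources/databases",
#       {"path": "./sources/extra/users.sqlite", "generateGUIDs": true}
#   ]
#
# With "generateGUIDs" set, each copied file named "<guid>_<name>.<ext>" is
# renamed with a freshly generated GUID, and the old/new pair is recorded in
# GUIDS_TO_REPLACE for replace_all_guids().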
def main():
    """Main function
    """
    args_parser = argparse.ArgumentParser()
    args_parser.add_argument("cfg", type=argparse.FileType("rb"),
                             help="configuration file")
    args_parser.add_argument("target", type=str, help="target folder")
    args_parser.add_argument("-v", "--verbosity", action="count",
                             help="be more verbose", default=0)
    args_parser.add_argument("-e", "--erase", action="store_true",
                             help="erase target folder")
    args_parser.add_argument("-q", "--quiet", action="store_true",
                             help="no user interaction")

    args = args_parser.parse_args()

    # Setup logging system and show necessary messages
    setup_logging(logging.INFO if args.verbosity == 0 else logging.DEBUG,
                  module_name=args.verbosity > 1)
    INFO("Information logging turned on")
    DEBUG("Debug logging turned on")

    # Parse config file
    DEBUG("Parsing config file")
    config = json_load(args.cfg, critical=True)
    INFO("Config file parsed successfully")

    config["target"] = {
        "path": args.target,
        "erase": args.erase,
        "quiet": args.quiet,
    }

    # Start the main process
    make(config)

    INFO("")
    INFO("+" * 70)
    INFO("\nPath to application:\n{}".format(config["target"]["path"]))
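# Typical invocation (the script file name is an assumption; the positional
# arguments and flags map directly onto the argparse definitions above):
#
#   python build_app.py config.json ./MyApplication -v --erase --quiet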
def create_name(self, attrs):
    if not check_by_regexps(attrs["Name"], IGNORE["Databases"]):
        return "{}_{}.{}".format(attrs["ID"], attrs["Name"], attrs["Type"])
    else:
        DEBUG("Ignore database: %s", attrs["Name"])
        return ""
def create_basic_structure(config):
    """Create basic folders
    """
    DEBUG("Creating basic structure")
    root = config["target"]["path"] = create_folder(**config["target"])

    if config["parse_all"]:
        for folder in constants.BASE_FOLDERS:
            create_folder(os.path.join(root, folder))
    else:
        if config["parse"]["databases"]:
            create_folder(os.path.join(root, constants.DATABASES_FOLDER))

        if config["parse"]["libraries"]:
            create_folder(os.path.join(root, constants.LIBRARIES_FOLDER))

        if config["parse"]["pages"]:
            create_folder(os.path.join(root, constants.PAGES_FOLDER))

        if config["parse"]["resources"]:
            create_folder(os.path.join(root, constants.RESOURCES_FOLDER))

        if config["parse"]["security"]:
            create_folder(os.path.join(root, constants.SECURITY_FOLDER))

        if config["parse"]["app_actions"]:
            create_folder(os.path.join(root, constants.APP_ACTIONS_FOLDER))

    INFO("Basic structure successfully created")
def write_libraries(config):
    INFO("Libraries Data: Processing...")
    libs_path = os.path.join(config["source"], constants.LIBRARIES_FOLDER)
    if not os.path.exists(libs_path):
        CRITICAL("Can't find: {}".format(libs_path))
        emergency_exit()

    write_xml("Libraries", indent=2)

    files = list(set(os.listdir(libs_path)) - set(constants.RESERVED_NAMES))
    for lib_name in sorted(files):
        lib_path = os.path.join(libs_path, lib_name)
        if not os.path.isfile(lib_path):
            continue

        DEBUG("Open file: %s", lib_path)
        with open_file(lib_path) as lib_f:
            write_xml(
                tagname="Library",
                attrs={"Name": lib_name.split(".", 1)[0]},
                indent=4,
                data=lib_f.read(),
                close=True
            )

    write_xml("Libraries", indent=2, closing=True)
    INFO("Libraries Data: Done!")
def write_databases(config):
    INFO("Databases Data: Processing...")
    dbs_path = os.path.join(config["source"], constants.DATABASES_FOLDER)
    if not os.path.exists(dbs_path):
        DEBUG("Can't find: {}".format(dbs_path))
        return

    write_xml("Databases", indent=2)

    files = list(set(os.listdir(dbs_path)) - set(constants.RESERVED_NAMES))
    for db_name in sorted(files):
        db_path = os.path.join(dbs_path, db_name)
        if not os.path.isfile(db_path):
            continue

        raw_name = db_name.split("_", 1)
        try:
            db_guid = UUID(raw_name[0])
        except ValueError:
            db_guid = gen_guid()

        raw_name = raw_name[-1].split(".", 1)
        db_name = raw_name[0]
        db_type = raw_name[1] if len(raw_name) == 2 else "sqlite"

        attrs = {
            "ID": db_guid,
            "Name": db_name,
            "Type": db_type
        }

        DEBUG("Open file: %s", db_path)
        with open_file(db_path) as db_f:
            write_xml(
                tagname="Database",
                attrs=attrs,
                indent=4,
                data=base64.b64encode(db_f.read()),
                close=True
            )

    write_xml("Databases", indent=2, closing=True)
    INFO("Databases Data: Done!")
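# Database files are expected to follow the "<guid>_<name>.<type>" naming
# convention that copy_databases() produces, e.g. (GUID and name are
# placeholders):
#
#   1b2f3c4d-5e6f-4a7b-8c9d-0e1f2a3b4c5d_users.sqlite
#
# If the leading component is not a valid UUID, a new GUID is generated for
# the <Database> element; if the file has no extension, the type defaults
# to "sqlite".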
def copy_libraries(config):
    """Copy libraries
    """
    if "Libraries" not in config:
        INFO("No information about libraries")
        return

    DEBUG("Collect libraries info")
    target_path = os.path.join(config["target"]["path"],
                               constants.LIBRARIES_FOLDER)

    DEBUG("Copy libraries")
    copy_files(target_path, config["Libraries"], config)
    INFO("Libraries were copied successfully")
def create_name(self, attrs):
    LIBRARIES.append(attrs["Name"])
    if not check_by_regexps(attrs["Name"], IGNORE["Libraries"]):
        return "{}{}".format(attrs["Name"], ACTION_EXT)
    else:
        DEBUG("Ignore library: %s", attrs["Name"])
        return ""
def write_file(self, name, data):
    """Write data to file
    """
    path = build_path(self.current_path(), name)
    DEBUG("Writing data to %s", path)
    with open_file(path, "wb") as hdlr:
        hdlr.write(data.encode('utf-8') if isinstance(data, unicode) else data)
def copy_app_actions(config):
    """Copy application actions
    """
    if "Actions" not in config:
        INFO("No information about application actions")
        return

    DEBUG("Collect application actions info")
    target_path = os.path.join(config["target"]["path"],
                               constants.APP_ACTIONS_FOLDER)

    DEBUG("Copy application actions")
    copy_files(target_path, config["Actions"], config)
    INFO("Application actions were copied successfully")
def copy_security(config):
    """Copy security section
    """
    if "Security" not in config:
        INFO("No information about security settings")
        return

    DEBUG("Collect security info")
    target_path = os.path.join(config["target"]["path"],
                               constants.SECURITY_FOLDER)

    DEBUG("Copy security settings")
    copy_files(target_path, config["Security"], config)
    INFO("Security settings were copied successfully")
def create_name(self, attrs):
    RESOURCES[attrs["ID"]] = name = "{}_{}_{}".format(
        attrs["ID"], attrs["Type"] or "res", attrs["Name"])
    if not check_by_regexps(attrs["Name"], IGNORE["Resources"]):
        return name
    else:
        DEBUG("Ignore resource: %s", attrs["Name"])
        return ""
def write_json_file(self, name, data):
    """Convert data to JSON and write it to file
    """
    path = build_path(self.current_path(), name)
    DEBUG("Writing JSON data to %s", path)
    with open_file(path, "wb") as hdlr:
        json_dump(data, hdlr, critical=True)
def create_basic_structure(config):
    """Create basic folders
    """
    DEBUG("Creating basic structure")
    root = config["target"]["path"] = create_folder(**config["target"])
    for folder in constants.BASE_FOLDERS:
        create_folder(os.path.join(root, folder))

    INFO("Basic structure successfully created")
def parse_app(config):
    """Initialize the VDOM Application XML parser and start the parsing process
    """
    global PARSER
    DEBUG("Initialize VDOM Application XML parser")
    PARSER = Parser()
    INFO("Parsing started...")
    PARSER.parse(config["source"], config["target"]["path"], config)
    INFO("Completed!")
def create_application_info_file(config):
    """Create __info__.json in root directory
    """
    DEBUG("Collect application info")
    app_info = config.get("ApplicationInfo", {})

    # Load existing config file or create an empty one
    if "BaseConfigFile" in app_info:
        with open_file(app_info.pop("BaseConfigFile"), config) as hdlr:
            info = json_load(hdlr)
    else:
        info = dict(ID=gen_guid(),
                    Name="Application",
                    Description="",
                    Owner="-",
                    Active="1",
                    Serverversion="",
                    ScriptingLanguage="python",
                    Icon="")

    # Update values from config
    for key, value in app_info.items():
        info[key] = value

    # Generate a new GUID if it is missing
    if not info.get("ID", ""):
        info["ID"] = gen_guid()

    # Write data to file
    path = os.path.join(config["target"]["path"], constants.INFO_FILE)
    DEBUG("Writing application info to '%s'", path)
    with fopen(path, "wb") as hdlr:
        json_dump(info, hdlr)

    INFO("Application info successfully written to '%s'", path)
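# A hedged example of the "ApplicationInfo" section this function consumes
# (the values are placeholders; "BaseConfigFile" is optional and, when
# present, points at an existing __info__.json used as a template, with the
# remaining keys overriding its values):
#
#   "ApplicationInfo": {
#       "BaseConfigFile": "./sources/__info__.json",
#       "Name": "MyApplication",
#       "Description": "Demo application",
#       "ScriptingLanguage": "python"
#   }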
def write_resources(config):
    INFO("Resources Data: Processing...")
    resources_path = os.path.join(config["source"], constants.RESOURCES_FOLDER)
    if not os.path.exists(resources_path):
        CRITICAL("Can't find: {}".format(resources_path))
        emergency_exit()

    write_xml("Resources", indent=2)

    files = list(
        set(os.listdir(resources_path)) - set(constants.RESERVED_NAMES))
    for res_name in sorted(files):
        res_path = os.path.join(resources_path, res_name)
        if not os.path.isfile(res_path):
            continue

        raw_name = res_name.split("_", 2)
        try:
            res_guid = UUID(raw_name[0])
        except ValueError:
            res_guid = gen_guid()
            res_type = res_name.rsplit(".", 1)
            res_type = res_type[1] if len(res_type) == 2 else "res"
        else:
            res_type = raw_name[1]
            res_name = raw_name[2]

        attrs = {
            "ID": res_guid,
            "Name": res_name,
            "Type": res_type
        }

        DEBUG("Open file: %s", res_path)
        with open_file(res_path) as res_f:
            write_xml(
                tagname="Resource",
                attrs=attrs,
                indent=4,
                data=base64.b64encode(res_f.read()),
                close=True
            )

    write_xml("Resources", indent=2, closing=True)
    INFO("Resources Data: Done!")
def detect_libraries(script_path):
    """Find all libraries used by a script
    """
    if ACTION_EXT != ".py":
        return

    if "current" not in PARSER.pages:
        return

    page_id = PARSER.pages["current"]
    finder = ModuleFinder()
    try:
        DEBUG("Parsing: %s", script_path)
        finder.run_script(script_path)
        DEBUG("Done: %s", script_path)
    except Exception:
        ERROR("Can't parse script: %s", script_path)
        EXCEPTION("")
        return

    PARSER.pages[page_id]["libraries"].extend(finder.modules.keys())
    PARSER.pages[page_id]["libraries"].extend(finder.badmodules.keys())
def main():
    """Main function
    """
    args_parser = argparse.ArgumentParser()
    args_parser.add_argument("source", type=str,
                             help="application source folder")
    args_parser.add_argument("target", type=str, help="target XML file")
    args_parser.add_argument("-v", "--verbosity", action="count",
                             help="be more verbose", default=0)

    args = args_parser.parse_args()

    # Setup logging system and show necessary messages
    setup_logging(logging.INFO if args.verbosity == 0 else logging.DEBUG,
                  module_name=args.verbosity > 1)
    INFO("")
    INFO("Information logging turned on")
    DEBUG("Debug logging turned on")
    INFO("")
    INFO(BLOCK_END)
    INFO("")

    config = {
        "target": {
            "path": args.target,
        },
        "source": args.source
    }

    # Start the main process
    build(config)

    INFO("\nPath to application XML:\n{}".format(config["target"]["path"]))
def child_start(self, tagname, attrs):
    tag_handlers_map = {
        "information": InformationTagHandler,
        "libraries": LibrariesTagHandler,
        "resources": ResourcesTagHandler,
        "databases": DatabasesTagHandler,
        "objects": PagesTagHandler,
        "e2vdom": E2vdomTagHandler,
        "structure": StructureTagHandler,
        "security": SecurityTagHandler,
    }

    if PARSER.config["parse_all"] or PARSER.config["parse"]["app_actions"]:
        tag_handlers_map["actions"] = ActionsTagHandler

    handler_cls = tag_handlers_map.get(tagname.lower(), None)
    if handler_cls:
        handler_cls().start(tagname, attrs)
    else:
        DEBUG("%s found unhandled tag '%s'", self.tagname, tagname)
def child_start(self, tagname, attrs):
    if tagname == "Object":
        cls = DummyObjectTagHandler
        if PARSER.config["parse_all"] or PARSER.config["parse"]["pages"]:
            if not check_by_regexps(attrs["Name"], IGNORE["Pages"]):
                cls = PageTagHandler
                PARSER.pages[attrs["ID"]] = {
                    "id": attrs["ID"],
                    "name": attrs["Name"],
                    "events": [],
                    "actions": {},
                    "guids": [],
                    "libraries": []
                }
                PARSER.pages["current"] = attrs["ID"]
            else:
                DEBUG("Ignore page: %s", attrs["Name"])

        cls(tagname, attrs).register()
def copy_files(target, sources, config):
    """Copy files
    """
    if not isinstance(sources, (list, tuple)):
        sources = (sources, )

    copied_files = []
    for source in sources:
        path = ""
        params = {"rename": None, "exclude": None, "include": None}

        # the source can be a single file or a folder
        # with additional params like rename, exclude, include
        if isinstance(source, dict):
            path = normalize_path(source["path"], config)
            for param in params:
                params[param] = val = source.get(param, None)
                if val and not isinstance(val, (list, tuple, dict)):
                    params[param] = (val, )

            if params["exclude"]:
                params["exclude"] = convert_to_regexp(params["exclude"])

            if params["include"]:
                params["include"] = convert_to_regexp(params["include"])

        # or a single file or a folder without additional params
        else:
            path = normalize_path(source, config)

        # fetch all files if @path is a directory
        if os.path.isdir(path):
            files = os.listdir(path)
        # else split @path into parent path and file name
        else:
            path, name = os.path.split(path.rstrip("\/"))
            files = (name, )

        for name in files:
            source_path = os.path.join(path, name)
            if os.path.isdir(source_path):
                DEBUG("Directories are not supported: {}".format(source_path))
                continue

            # if the file is in the exclude list - skip it
            if params["exclude"] and check_by_regexps(name, params["exclude"]):
                continue

            # if the file is not in the include list - skip it
            if params["include"] and not check_by_regexps(
                    name, params["include"]):
                continue

            # if the file is in the rename list - rename it,
            # otherwise keep the source name
            if params["rename"] and \
                    (name in params["rename"] or len(files) == 1):
                if isinstance(params["rename"], (tuple, list)):
                    new_name = params["rename"][0]
                else:
                    new_name = params["rename"].get(name, name)
            else:
                new_name = name

            target_path = os.path.join(target, new_name)
            if os.path.exists(source_path):
                DEBUG("Copy '%s' to '%s'", source_path, target_path)
                shutil.copy2(source_path, target_path)
                copied_files.append(new_name)
            else:
                ERROR("No such file or directory: '{}'".format(source_path))

    return copied_files
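# A hedged example of a single @sources entry understood by copy_files()
# (the paths and names are hypothetical; only "path", "rename", "exclude" and
# "include" are read above, and the pattern syntax is whatever
# convert_to_regexp()/check_by_regexps() accept):
#
#   {
#       "path": "./sources/libraries",
#       "include": ["*.py"],
#       "exclude": ["test_*"],
#       "rename": {"old_name.py": "new_name.py"}
#   }
#
# A bare string is also accepted and is treated as a path with no options.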
def copy_pages(config):
    """Copy pages and change resources and objects GUIDs
    """
    if "Pages" not in config:
        INFO("No information about pages")
        return

    target_path = os.path.join(config["target"]["path"],
                               constants.PAGES_FOLDER)
    pages = config["Pages"]
    params = {
        # "rename": None,
        "exclude": None,
        "include": None
    }

    if not isinstance(pages, (list, tuple)):
        pages = (pages, )

    new_pages = []
    for page in pages:
        _page = {}
        if isinstance(page, (str, unicode)):
            _page = {"path": normalize_path(page, config)}
            if not _page["path"].rstrip("\/").lower().endswith("pages"):
                new_pages.append(page)
                continue
        elif isinstance(page, dict):
            _page["path"] = normalize_path(page["path"], config)
            for param in params:
                params[param] = val = page.get(param, None)
                if val and not isinstance(val, (list, tuple, dict)):
                    params[param] = (val, )

            if params["exclude"]:
                params["exclude"] = convert_to_regexp(params["exclude"])

            if params["include"]:
                params["include"] = convert_to_regexp(params["include"])

        if not os.path.exists(_page["path"]):
            ERROR("No such directory: '%s'", _page["path"])
            continue

        for folder in os.listdir(_page["path"]):
            # if the folder is in the exclude list - skip it
            if params["exclude"] and check_by_regexps(
                    folder, params["exclude"]):
                continue

            # if the folder is not in the include list - skip it
            if params["include"] and not check_by_regexps(
                    folder, params["include"]):
                continue

            folder_path = os.path.join(_page["path"], folder)
            if not os.path.isdir(folder_path):
                ERROR("Page can't be a file: %s", folder_path)
                continue

            new_page = _page.copy()
            new_page["path"] = folder_path
            new_pages.append(new_page)

    for page in new_pages:
        if isinstance(page, (str, unicode)):
            page = {"path": page}

        # normalize path - replace alias with real path
        page["path"] = normalize_path(page["path"], config)
        if not os.path.exists(page["path"]):
            ERROR("No such directory: '{}'".format(page["path"]))
            continue

        # if the name is not defined - take it from the folder name
        if not page.get("name", ""):
            page["name"] = os.path.split(page["path"].rstrip("\/"))[1]
            page["rename"] = False

        copy_path = os.path.join(target_path, page["name"])
        if os.path.exists(copy_path):
            ERROR("Directory already exists: '{}'".format(copy_path))
            continue

        if page.get("mode", "") not in ("move", "copy"):
            page["mode"] = "move"

        # copy page to new folder
        DEBUG("Copy '{}' to '{}'".format(page["path"], copy_path))
        shutil.copytree(page["path"], copy_path)

        info_path = os.path.join(copy_path, constants.INFO_FILE)
        with fopen(info_path, "rb") as hdlr:
            info = json_load(hdlr, critical=True)

        if page.get("rename", True):
            info["attrs"]["Name"] = page["name"]
            with fopen(info_path, "wb") as hdlr:
                json_dump(info, hdlr, critical=True)

        # if the page was only moved, nothing else to do;
        # otherwise all GUIDs have to be replaced with new ones
        if page["mode"] == "move":
            continue

        new_guid = gen_guid()
        old_guid = info["attrs"]["ID"]
        GUIDS_TO_REPLACE[old_guid] = new_guid
        GUIDS_TO_REPLACE[old_guid.replace("-", "_")] = \
            new_guid.replace("-", "_")

    INFO("Pages were copied successfully")
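# A hedged example of the "Pages" section consumed by copy_pages().  A bare
# string pointing at a "pages" folder is expanded into one entry per page
# sub-folder (optionally filtered by "include"/"exclude"); an explicit entry
# may also set "name" and "mode" ("move" or "copy").  All paths and names
# here are hypothetical.
#
#   "Pages": [
#       "./sources/pages",
#       {"path": "./sources/landing", "name": "index", "mode": "copy"}
#   ]
#
# With "mode": "copy" the page keeps its content but its old/new GUID pair is
# queued in GUIDS_TO_REPLACE, so replace_all_guids() later rewrites every
# reference to it.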
def write_e2vdom(config):
    INFO("E2VDOM Data: Processing...")
    write_xml("E2vdom", indent=2)
    pages_path = os.path.join(config["source"], constants.PAGES_FOLDER)

    all_events = []
    all_actions = []
    for name in os.listdir(pages_path):
        e2vdom_path = os.path.join(pages_path, name, constants.E2VDOM_FILE)
        if not os.path.exists(e2vdom_path):
            INFO("No file %s; skipping E2VDOM for %s", e2vdom_path, name)
            continue

        DEBUG("Open file: %s", e2vdom_path)
        with open_file(e2vdom_path) as e2vdom_file:
            e2vdom = json_load(e2vdom_file, critical=True)
            all_events.extend(e2vdom["events"])
            all_actions.extend(e2vdom["actions"])

    INFO("E2VDOM Data: Writing events")
    write_xml("Events", indent=4)
    for event in all_events:
        actions = event.pop("actions", [])
        write_xml("Event", attrs=event, indent=6)

        for action in actions:
            write_xml(
                "Action",
                attrs={"ID": action},
                indent=8,
                data="",
                close=True
            )

        write_xml("Event", indent=6, closing=True)

    write_xml("Events", indent=4, closing=True)
    INFO("E2VDOM Data: Events done!")

    INFO("E2VDOM Data: Writing actions")
    write_xml("Actions", indent=4)
    for action in all_actions:
        params = action.pop("Params", [])
        write_xml("Action", attrs=action, indent=6)

        for key, value in params:
            write_xml(
                "Parameter",
                attrs={"ScriptName": key},
                indent=8,
                data=value,
                close=True
            )

        write_xml("Action", indent=6, closing=True)

    write_xml("Actions", indent=4, closing=True)
    write_xml("E2vdom", indent=2, closing=True)
    INFO("E2VDOM Data: Actions done!")
    INFO("E2VDOM Data: Done!")
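# A hedged sketch of the per-page E2VDOM file that write_e2vdom() expects.
# Only the "actions" list inside an event and the "Params" list inside an
# action are treated specially (and, for actions, an ID referenced from the
# events); every other key is emitted verbatim as an XML attribute.  The
# attribute names below are placeholders, not a documented schema.
#
#   {
#       "events": [
#           {"<attr>": "<value>", "actions": ["<action-id>"]}
#       ],
#       "actions": [
#           {"ID": "<action-id>", "<attr>": "<value>",
#            "Params": [["<script-name>", "<value>"]]}
#       ]
#   }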
def main():
    """Main function
    """
    args_parser = argparse.ArgumentParser()
    args_parser.add_argument("source", type=argparse.FileType("rb"),
                             help="application XML file")
    args_parser.add_argument("-t", "--target", type=str, help="target folder")
    args_parser.add_argument("-v", "--verbosity", action="count",
                             help="be more verbose", default=0)
    args_parser.add_argument("-e", "--erase", action="store_true",
                             help="erase target folder")
    args_parser.add_argument("-q", "--quiet", action="store_true",
                             help="no user interaction")
    args_parser.add_argument("-i", "--ignore-cfg", type=argparse.FileType("rb"),
                             help="ignore config file")
    args_parser.add_argument("-l", "--libraries", action="store_true",
                             help="parse libraries")
    args_parser.add_argument("-p", "--pages", action="store_true",
                             help="parse pages")
    args_parser.add_argument("-d", "--databases", action="store_true",
                             help="parse databases")
    args_parser.add_argument("-r", "--resources", action="store_true",
                             help="parse resources")
    args_parser.add_argument("-n", "--info", action="store_true",
                             help="parse information")
    args_parser.add_argument("-s", "--security", action="store_true",
                             help="parse security")
    args_parser.add_argument("-u", "--structure", action="store_true",
                             help="parse structure")
    args_parser.add_argument("-o", "--e2vdom", action="store_true",
                             help="parse e2vdom")
    args_parser.add_argument("-c", "--app-actions", action="store_true",
                             help="parse application actions")
    args_parser.add_argument("-ds", "--delete-source", action="store_true",
                             help="delete source .xml file")

    args = args_parser.parse_args()

    # Setup logging system and show necessary messages
    log_level = logging.INFO if not args.verbosity else logging.DEBUG
    show_module_name = args.verbosity > 1
    setup_logging(log_level, module_name=show_module_name)
    INFO("")
    INFO("Information logging turned on")
    DEBUG("Debug logging turned on")
    INFO("")
    INFO(BLOCK_END)
    INFO("")

    ignore = args.ignore_cfg
    if ignore:
        INFO("Parsing: 'ignore' configuration file")
        ignore = json_load(ignore, critical=True)
        INFO("Done: 'ignore' configuration file")

    config = {
        "target": {
            "path": args.target or
                os.path.split(args.source.name)[-1].split(".")[0],
            "erase": args.erase,
            "quiet": args.quiet,
        },
        "source": args.source,
        "ignore": ignore,
        "delete_source": args.delete_source,
        "parse": {
            "app_actions": args.app_actions,
            "e2vdom": args.e2vdom,
            "structure": args.structure,
            "security": args.security,
            "info": args.info,
            "resources": args.resources,
            "databases": args.databases,
            "pages": args.pages,
            "libraries": args.libraries
        },
    }

    # if no section flags were given, parse the whole application
    config["parse_all"] = not any(config["parse"].values())

    # Start the main process
    parse(config)

    if config["delete_source"] and os.path.exists(args.source.name):
        args.source.close()
        os.remove(args.source.name)

    INFO("\nPath to application:\n{}".format(config["target"]["path"]))
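# Typical invocations (the script file name is an assumption).  With no
# section flags the whole application is parsed; individual flags restrict
# parsing to the listed sections:
#
#   python xml2app.py MyApplication.xml -t ./MyApplication -e -q
#   python xml2app.py MyApplication.xml -p -l -r   # pages, libraries, resources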