def main(): """Main function """ args_parser = argparse.ArgumentParser() args_parser.add_argument("cfg", type=argparse.FileType("rb"), help="configuration file") args_parser.add_argument("target", type=str, help="target folder") args_parser.add_argument("-v", "--verbosity", action="count", help="be more verbose", default=0) args_parser.add_argument("-e", "--erase", action="store_true", help="erase target folder") args_parser.add_argument("-q", "--quiet", action="store_true", help="no user interaction") args = args_parser.parse_args() # Setup logging system and show necessary messages setup_logging(logging.INFO if args.verbosity == 0 else logging.DEBUG, module_name=True if args.verbosity > 1 else False) INFO("Information logging turned on") DEBUG("Debug logging turned on") # Parsing config file DEBUG("Parsing config file") config = json_load(args.cfg, critical=True) INFO("Config file parsed successfully") config["target"] = { "path": args.target, "erase": args.erase, "quiet": args.quiet, } # Main process starting make(config) INFO("") INFO("+" * 70) INFO("\nPath to application:\n{}".format(config["target"]["path"]))
def write_app_info(config):
    INFO("Application Information Data: Processing...")

    info_path = os.path.join(config["source"], constants.INFO_FILE)
    with open_file(info_path) as info_file:
        info_json = json_load(info_file, critical=True)

    write_xml("Information", indent=2)
    for tagname, value in info_json.items():
        write_xml(tagname, data=value, close=True, indent=4)
    write_xml("Information", indent=2, closing=True)

    INFO("Application Information Data: Done!")


def write_object(path, name, indent):
    with open_file(os.path.join(path, name)) as obj_file:
        obj_json = json_load(obj_file, critical=True)

    if "Type" in obj_json["attrs"] \
            and obj_json["attrs"]["Type"] in constants.EXTERNAL_SOURCE_TYPES \
            and "source_file_name" in obj_json["attrs"]:
        source_file_name = obj_json["attrs"]["source_file_name"]
        del obj_json["attrs"]["source_file_name"]

        with open_file(os.path.join(path, source_file_name)) as source_file:
            obj_json["attributes"]["source"] = \
                clean_data(source_file.read()).decode('utf-8')

    write_xml("Object", attrs=obj_json["attrs"], indent=indent)
    write_xml("Actions", indent=indent+2, data="", close=True)
    write_xml("Objects", indent=indent+2, data="", close=True)
    write_attributes(obj_json["attributes"], indent+2)
    write_xml("Object", indent=indent, closing=True)
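
# Note on the object JSON layout assumed above: each object file carries an
# "attrs" dict (written as XML attributes of the <Object> element) and a
# separate "attributes" dict (written via write_attributes). For objects
# whose "Type" is listed in constants.EXTERNAL_SOURCE_TYPES, the file named
# by attrs["source_file_name"] is read and inlined as attributes["source"].
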
def write_actions(path, indent):
    actions_map_path = os.path.join(path, constants.MAP_FILE)
    if not os.path.exists(actions_map_path):
        INFO("Can't find: %s; skipping Actions", actions_map_path)
        write_xml("Actions", indent=indent)
        write_xml("Actions", indent=indent, closing=True)
        return

    with open_file(actions_map_path) as actions_map_file:
        actions_map = json_load(actions_map_file, critical=True)

    write_xml("Actions", indent=indent)
    for action_name in sorted(os.listdir(path)):
        action_path = os.path.join(path, action_name)
        if not os.path.isfile(action_path) or \
                action_name in constants.RESERVED_NAMES:
            continue

        attrs = actions_map.get(action_name, None)
        if not attrs:
            attrs = {
                "Top": "",
                "State": "",
                "Left": "",
                "ID": str(gen_guid()),
                "Name": action_name.split(".", 1)[0],
            }

        with open_file(action_path) as action_f:
            write_xml(
                tagname="Action",
                attrs=attrs,
                indent=indent+2,
                data=action_f.read(),
                close=True,
                force_cdata=True
            )
    write_xml("Actions", indent=indent, closing=True)
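
# For reference, the actions map file (constants.MAP_FILE) is expected to map
# action file names to their XML attributes; a hypothetical minimal example:
#
#   {
#       "onload.py": {"ID": "<guid>", "Name": "onload",
#                     "Top": "", "Left": "", "State": ""}
#   }
#
# Actions missing from the map fall back to the attributes generated above
# (a fresh GUID and a Name derived from the file name).
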
def write_structure(config):
    INFO("Structure Data: Processing...")

    structure_path = os.path.join(config["source"], constants.STRUCT_FILE)
    if not os.path.exists(structure_path):
        ERROR("Can't find: {}".format(structure_path))
        write_xml("Structure", indent=2, close=True)
        return

    write_xml("Structure", indent=2)
    with open_file(structure_path) as struct_file:
        struct_json = json_load(struct_file, critical=True)
        for obj in struct_json:
            write_xml("Object", attrs=obj, data="", close=True, indent=4)
    write_xml("Structure", indent=2, closing=True)

    INFO("Structure Data: Done!")
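
# The structure file (constants.STRUCT_FILE) is assumed to be a JSON list of
# attribute dicts; each entry is written as a closed, empty <Object> element
# inside <Structure>.
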
def create_application_info_file(config):
    """Create __info__.json in root directory
    """
    DEBUG("Collect application info")
    app_info = config.get("ApplicationInfo", {})

    # Load the existing config file or create an empty one
    if "BaseConfigFile" in app_info:
        with open_file(app_info.pop("BaseConfigFile"), config) as hdlr:
            info = json_load(hdlr)
    else:
        info = dict(ID=gen_guid(),
                    Name="Application",
                    Description="",
                    Owner="-",
                    Active="1",
                    Serverversion="",
                    ScriptingLanguage="python",
                    Icon="")

    # Update values from config
    for key, value in app_info.items():
        info[key] = value

    # Generate a new GUID if one doesn't exist yet
    if not info.get("ID", ""):
        info["ID"] = gen_guid()

    # Write data to file
    path = os.path.join(config["target"]["path"], constants.INFO_FILE)
    DEBUG("Writing application info to '%s'", path)
    with fopen(path, "wb") as hdlr:
        json_dump(info, hdlr)
    INFO("Application info successfully written to '%s'", path)
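
# When no "BaseConfigFile" is given, the written constants.INFO_FILE contains
# the defaults built above, with any keys from config["ApplicationInfo"]
# layered on top, e.g.:
#
#   {
#       "ID": "<generated guid>", "Name": "Application", "Description": "",
#       "Owner": "-", "Active": "1", "Serverversion": "",
#       "ScriptingLanguage": "python", "Icon": ""
#   }
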
def main(): """Main function """ args_parser = argparse.ArgumentParser() args_parser.add_argument("source", type=argparse.FileType("rb"), help="application XML file") args_parser.add_argument("-t", "--target", type=str, help="target folder") args_parser.add_argument("-v", "--verbosity", action="count", help="be more verbose", default=0) args_parser.add_argument("-e", "--erase", action="store_true", help="erase target folder") args_parser.add_argument("-q", "--quiet", action="store_true", help="no user interaction") args_parser.add_argument("-i", "--ignore-cfg", type=argparse.FileType("rb"), help="ignore config file") args_parser.add_argument("-l", "--libraries", action="store_true", help="parse libraries") args_parser.add_argument("-p", "--pages", action="store_true", help="parse pages") args_parser.add_argument("-d", "--databases", action="store_true", help="parse databases") args_parser.add_argument("-r", "--resources", action="store_true", help="parse resources") args_parser.add_argument("-n", "--info", action="store_true", help="parse information") args_parser.add_argument("-s", "--security", action="store_true", help="parse security") args_parser.add_argument("-u", "--structure", action="store_true", help="parse structure") args_parser.add_argument("-o", "--e2vdom", action="store_true", help="parse e2vdom") args_parser.add_argument("-c", "--app-actions", action="store_true", help="parse application actions") args_parser.add_argument("-ds", "--delete-source", action="store_true", help="delete source .xml file") args = args_parser.parse_args() # Setup logging system and show necessary messages log_level = logging.INFO if not args.verbosity else logging.DEBUG show_module_name = args.verbosity > 1 setup_logging(log_level, module_name=show_module_name) INFO("") INFO("Information logging turned on") DEBUG("Debug logging turned on") INFO("") INFO(BLOCK_END) INFO("") ignore = args.ignore_cfg if ignore: INFO("Parsing: 'ignore' configuration file") ignore = json_load(ignore, critical=True) INFO("Done: 'ignore' configuration file") config = { "target": { "path": args.target or os.path.split(args.source.name)[-1].split(".")[0], "erase": args.erase, "quiet": args.quiet, }, "source": args.source, "ignore": ignore, "delete_source": args.delete_source, "parse": { "app_actions": args.app_actions, "e2vdom": args.e2vdom, "structure": args.structure, "security": args.security, "info": args.info, "resources": args.resources, "databases": args.databases, "pages": args.pages, "libraries": args.libraries }, } parse_all = False for val in config["parse"].values(): parse_all = parse_all or val config["parse_all"] = not parse_all # Main process starting parse(config) if config["delete_source"] and os.path.exists(args.source.name): args.source.close() os.remove(args.source.name) INFO("\nPath to application:\n{}".format(config["target"]["path"]))
def write_e2vdom(config):
    INFO("E2VDOM Data: Processing...")
    write_xml("E2vdom", indent=2)

    pages_path = os.path.join(config["source"], constants.PAGES_FOLDER)
    all_events = []
    all_actions = []
    for name in os.listdir(pages_path):
        e2vdom_path = os.path.join(pages_path, name, constants.E2VDOM_FILE)
        if not os.path.exists(e2vdom_path):
            INFO("No file %s; skipping E2VDOM for %s", e2vdom_path, name)
            continue
        else:
            DEBUG("Open file: %s", e2vdom_path)

        with open_file(e2vdom_path) as e2vdom_file:
            e2vdom = json_load(e2vdom_file, critical=True)
            all_events.extend(e2vdom["events"])
            all_actions.extend(e2vdom["actions"])

    INFO("E2VDOM Data: Writing events")
    write_xml("Events", indent=4)
    for event in all_events:
        actions = event.pop("actions", [])
        write_xml("Event", attrs=event, indent=6)
        for action in actions:
            write_xml(
                "Action",
                attrs={"ID": action},
                indent=8,
                data="",
                close=True
            )
        write_xml("Event", indent=6, closing=True)
    write_xml("Events", indent=4, closing=True)
    INFO("E2VDOM Data: Events done!")

    INFO("E2VDOM Data: Writing actions")
    write_xml("Actions", indent=4)
    for action in all_actions:
        params = action.pop("Params", [])
        write_xml("Action", attrs=action, indent=6)
        for key, value in params:
            write_xml(
                "Parameter",
                attrs={"ScriptName": key},
                indent=8,
                data=value,
                close=True
            )
        write_xml("Action", indent=6, closing=True)
    write_xml("Actions", indent=4, closing=True)

    write_xml("E2vdom", indent=2, closing=True)
    INFO("E2VDOM Data: Actions done!")
    INFO("E2VDOM Data: Done!")
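
# Each page's constants.E2VDOM_FILE is expected to hold two lists, "events"
# and "actions". A hypothetical minimal example (attribute names are
# illustrative; every key other than "actions"/"Params" is emitted as an XML
# attribute of the corresponding Event/Action element):
#
#   {
#       "events": [{"Name": "onclick", "actions": ["<action guid>"]}],
#       "actions": [{"ID": "<action guid>", "Name": "show_message",
#                    "Params": [["message", "hello"]]}]
#   }
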
def walk(path, name, indent):
    new_path = os.path.join(path, name)
    actions_folder = "Actions-{}".format(name)
    info_path = os.path.join(new_path, constants.INFO_FILE)
    if not os.path.exists(info_path):
        CRITICAL("Can't find: {}".format(info_path))
        emergency_exit()

    with open_file(info_path) as info_file:
        info_json = json_load(info_file, critical=True)

    attrs = info_json["attrs"]
    if attrs is not None and 'ID' in attrs:
        id = attrs['ID']
        if id in OBJS:
            ERROR("Encountered duplicate GUID: {duplicate} duplicates "
                  "{origin}: Ignoring {duplicate}".format(
                      duplicate=name, origin=OBJS[id]))
            return
        else:
            OBJS[id] = name

    write_xml("Object", attrs=attrs, indent=indent)
    write_actions(os.path.join(new_path, actions_folder), indent+2)
    write_xml("Objects", indent=indent+2)

    childs_order_path = os.path.join(new_path, constants.CHILDS_ORDER)
    if os.path.exists(childs_order_path):
        with open(childs_order_path) as f:
            names = json_load(f, default=[], critical=False)
            names = map(lambda s: s.lower(), names)
            childs_order = dict(zip(names, xrange(len(names))))
    else:
        childs_order = {}

    max_value = len(childs_order) + 1

    def key_func(name):
        key = name.lower()
        if key.endswith('.json'):
            key = key[:-5]
        return [childs_order.get(key, max_value), name]

    nodes = list(set(os.listdir(new_path)) -
                 set(constants.RESERVED_NAMES) -
                 {actions_folder})
    nodes = [node for node in nodes
             if not constants.RESERVED_NAMES_REGEXP.match(node)]
    ordered_nodes = sorted(nodes, key=key_func)

    for name in ordered_nodes:
        if os.path.isdir(os.path.join(new_path, name)):
            walk(new_path, name, indent+4)
        else:
            write_object(new_path, name, indent+4)

    write_xml("Objects", indent=indent+2, closing=True)
    write_attributes(info_json["attributes"], indent+2)
    write_xml("Object", indent=indent, closing=True)
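
# The optional constants.CHILDS_ORDER file read above is assumed to be a JSON
# list of child object names (matched case-insensitively, without the ".json"
# extension); children listed there are emitted first, in that order, and any
# remaining children follow sorted by name.
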
def write_security(config):
    INFO("Security Data: Processing...")

    security_path = os.path.join(config["source"], constants.SECURITY_FOLDER)
    if not os.path.exists(security_path):
        INFO("Can't find: {}".format(security_path))
        return

    groups_and_users_path = \
        os.path.join(security_path, constants.USERS_GROUPS_FILE)
    if os.path.exists(groups_and_users_path):
        with open_file(groups_and_users_path) as ug_file:
            ug_json = json_load(ug_file, critical=True)
    else:
        ug_json = {}

    write_xml("Security", indent=2)
    write_xml("Groups", indent=4, close=True)
    write_xml("Users", indent=4)

    INFO("Security Data: Writing users")
    for user in ug_json.get("users", []):
        write_xml("User", indent=6)
        for key, value in user.items():
            if key == "Rights":
                write_xml("Rights", indent=8)
                for right in value:
                    write_xml(
                        "Right",
                        attrs=right,
                        indent=10,
                        close=True
                    )
                write_xml("Rights", indent=8, closing=True)
            else:
                write_xml(
                    key,
                    data=value,
                    indent=8,
                    close=True,
                    force_cdata=True
                )
        write_xml("User", indent=6, closing=True)
    write_xml("Users", indent=4, closing=True)
    INFO("Security Data: Users done!")

    INFO("Security Data: Writing LDAP")
    ldap_path = os.path.join(security_path, constants.LDAP_LDIF)
    if os.path.exists(ldap_path):
        with open_file(ldap_path) as ldap_file:
            write_xml(
                "LDAP",
                indent=4,
                data=base64.b64encode(ldap_file.read()),
                close=True
            )
    else:
        write_xml("LDAP", indent=4, data="", close=True)

    write_xml("Security", indent=2, closing=True)
    INFO("Security Data: Done!")
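
# The users/groups file (constants.USERS_GROUPS_FILE) is assumed to look
# roughly like the hypothetical sketch below (key and attribute names are
# illustrative): every key of a user other than "Rights" is written as a
# CDATA element, while each "Rights" entry becomes the attributes of a
# <Right> element. Note that only an empty <Groups/> element is emitted.
#
#   {
#       "users": [
#           {"Login": "admin", "Password": "<hash>",
#            "Rights": [{"Target": "<guid>", "Access": "..."}]}
#       ]
#   }
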
def copy_pages(config):
    """Copy pages and change resource and object GUIDs
    """
    if "Pages" not in config:
        INFO("No information about pages")
        return

    target_path = os.path.join(config["target"]["path"],
                               constants.PAGES_FOLDER)
    pages = config["Pages"]
    params = {
        # "rename": None,
        "exclude": None,
        "include": None
    }
    if not isinstance(pages, (list, tuple)):
        pages = (pages, )

    new_pages = []
    for page in pages:
        _page = {}  # make sure _page is bound before either branch uses it
        if isinstance(page, (str, unicode)):
            _page = {"path": normalize_path(page, config)}
            if not _page["path"].rstrip("\/").lower().endswith("pages"):
                new_pages.append(page)
        else:
            if isinstance(page, dict):
                _page["path"] = normalize_path(page["path"], config)

            for param in params:
                params[param] = val = page.get(param, None)
                if val and not isinstance(val, (list, tuple, dict)):
                    params[param] = (val, )
            if params["exclude"]:
                params["exclude"] = convert_to_regexp(params["exclude"])
            if params["include"]:
                params["include"] = convert_to_regexp(params["include"])

            if not os.path.exists(_page["path"]):
                ERROR("No such directory: '%s'", _page["path"])
                continue

            for folder in os.listdir(_page["path"]):
                # skip folders matching the exclude list
                if params["exclude"] and check_by_regexps(
                        folder, params["exclude"]):
                    continue
                # skip folders not matching the include list
                if params["include"] and not check_by_regexps(
                        folder, params["include"]):
                    continue

                folder_path = os.path.join(_page["path"], folder)
                if not os.path.isdir(folder_path):
                    ERROR("Page can't be file: %s", folder_path)
                    continue

                new_page = _page.copy()
                new_page["path"] = folder_path
                new_pages.append(new_page)

    for page in new_pages:
        if isinstance(page, (str, unicode)):
            page = {"path": page}

        # normalize path - replace alias with real path
        page["path"] = normalize_path(page["path"], config)
        if not os.path.exists(page["path"]):
            ERROR("No such directory: '{}'".format(page["path"]))
            continue

        # if the name is not defined, take it from the folder name
        if not page.get("name", ""):
            page["name"] = os.path.split(page["path"].rstrip("\/"))[1]
            page["rename"] = False

        copy_path = os.path.join(target_path, page["name"])
        if os.path.exists(copy_path):
            ERROR("Directory already exists: '{}'".format(copy_path))
            continue

        if page.get("mode", "") not in ("move", "copy"):
            page["mode"] = "move"

        # copy the page to the new folder
        DEBUG("Copy '{}' to '{}'".format(page["path"], copy_path))
        shutil.copytree(page["path"], copy_path)

        info_path = os.path.join(copy_path, constants.INFO_FILE)
        with fopen(info_path, "rb") as hdlr:
            info = json_load(hdlr, critical=True)

        if page.get("rename", True):
            info["attrs"]["Name"] = page["name"]
            with fopen(info_path, "wb") as hdlr:
                json_dump(info, hdlr, critical=True)

        # if the page was moved, keep its GUIDs; otherwise map the old GUID
        # to a freshly generated one
        if page["mode"] == "move":
            continue

        new_guid = gen_guid()
        old_guid = info["attrs"]["ID"]
        GUIDS_TO_REPLACE[old_guid] = new_guid
        GUIDS_TO_REPLACE[old_guid.replace("-", "_")] = \
            new_guid.replace("-", "_")

    INFO("Pages were copied successfully")
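
# Entries of config["Pages"] may be plain path strings or dicts; as read by
# the code above, the dict form supports "path", "name", "mode" ("move" or
# "copy"), "rename", and "exclude"/"include" filters for expanding a pages
# directory. A hypothetical example:
#
#   "Pages": [
#       "./repo/MyPage",
#       {"path": "./repo/pages", "exclude": "draft_.*", "mode": "copy"}
#   ]
#
# In "copy" mode the page gets a fresh GUID recorded in GUIDS_TO_REPLACE;
# in "move" mode (the default) the original GUIDs are kept.
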