def write_cnf_to_file(cnf_f, cnf_type_with_ext, release_note_creator, server):
    """Parse a .cnf file, register its objects on the release note and
    rewrite the file with the parsed content.

    When parsing yields nothing, the (now useless) cnf file is deleted
    and nothing is registered.

    :param cnf_f: path to the .cnf file to process (no-op when falsy)
    :param cnf_type_with_ext: cnf type name including the ".cnf" extension
    :param release_note_creator: collector that records release-note objects
    :param server: target server the objects belong to
    """
    try:
        if cnf_f:
            file_only_name = cnf_type_with_ext.split(".cnf")[0]
            _global_val = None
            if file_only_name == 'globalVariables':
                # global variables yield both the keys and a key->value map
                _cnf_to_write, _global_val = ParserTool.parse_global_var(cnf_f)
            else:
                _cnf_to_write = ParserTool.parse_cnf_file(
                    file_only_name, cnf_f)
            if not _cnf_to_write:
                # nothing parsed: drop the file and stop
                if os.path.exists(cnf_f):
                    os.remove(cnf_f)
                return
            if file_only_name == 'globalVariables':
                release_note_creator.add_object_to_release_note(
                    file_only_name, server,
                    objects_key=_cnf_to_write, objects_value=_global_val)
            else:
                release_note_creator.add_object_to_release_note(
                    file_only_name, server, objects_key=_cnf_to_write)
            # overwrite the cnf file with the parsed entries, one per line
            # (the explicit close() inside the with-block was redundant)
            with open(cnf_f, 'w') as _f:
                for el in _cnf_to_write:
                    _f.write(el + "\n")
    except IOError as er:
        logger.error("[ERROR]: Error for cnf file " + cnf_f + " error" +
                     str(er.strerror))
        sys.exit(-1)
def fill_list_file(self):
    """Build the config list file and collect release-note entries.

    Custom configs, cnf entries and cache elements are extracted from the
    log file and grouped per server; grouped entries are written to the
    list file while release-note strings are accumulated on the instance.
    """
    logger.debug('Creating list file for %s' % self.data_type)
    # NOTE(review): the original comment said "Config are not wrote to
    # list file", but config_by_server entries ARE written below --
    # presumably it referred to the release-note/cache outputs; verify.
    custom_config_ds = sorted(Pt.get_data_type_set(
        self.log_file, self.config_regex_properties, self.config_xpath,
        is_full=self.is_full))
    self.cnf_list = sorted(Pt.get_data_type_set(
        self.log_file, self.config_regex_cnf, self.config_xpath,
        is_full=self.is_full))
    cache_ds = sorted(Pt.get_data_type_set(
        self.log_file, self.cache_element_regex, self.config_xpath,
        is_full=self.is_full))
    try:
        with open(self.list_file, 'w') as list_file:
            # 1. custom configs first
            # NOTE(review): list prefix is "bam_cstcfg_" while the
            # release-note prefix is "bam_cstscfg_" -- confirm the extra
            # 's' is intentional.
            for custom_conf in custom_config_ds:
                server, _property, rel_string = \
                    self.get_server_from_property_and_return_element(
                        custom_conf)
                self.config_by_server[server].add("bam_cstcfg_" + _property)
                self.release_note_objects_by_server[server].add(
                    "bam_cstscfg_" + rel_string)
            # 2. cnf entries
            for _cnf in self.cnf_list:
                server, _cnf, rel_string = \
                    self.get_server_from_property_and_return_element(_cnf)
                self.config_by_server[server].add("bam_cf_is_" + _cnf)
                self.release_note_objects_by_server[server].add(
                    "bam_cf_is_" + rel_string)
            # 3. cache elements: grouped per server but reported only in
            # the cache output, not in the release note
            for _cache in cache_ds:
                server, _cnf, rel_string = \
                    self.get_server_from_property_and_return_element(_cache)
                self.config_by_server[server].add("bam_cf_is_" + _cnf)
                self.cache_list_output.append("bam_cf_is_" + rel_string)
            # Emit per-server, sorted, so output is deterministic.
            # (Redundant list_file.close() inside the with-block removed.)
            for server in self.server_list:
                for el in sorted(self.config_by_server[server]):
                    list_file.write(el + "\n")
                    self.output_config.append("%s\n" % el)
                for el in sorted(self.release_note_objects_by_server[server]):
                    self.release_note_all_objs.append("%s\n" % el)
    except IOError as e:
        logger.error('Problem occurs for output file for config %s ' % str(e))
def write_delta_cnf_to_file(cnf_file_current, cnf_file_previous,
                            cnf_type_with_ext, out_file,
                            release_note_creator, server):
    """Write the delta between two cnf files and record it on the release note.

    Parses the current and previous cnf files, computes the entries
    present in the current one but not in the previous one (plus, for
    global variables, keys whose value changed) and writes them to
    ``out_file``. When either input file is missing the release note is
    signalled and nothing is written.

    :return: 0 when there is no difference, otherwise None
    """
    try:
        file_only_name = cnf_type_with_ext.split(".cnf")[0]
        _global_val, _p_global_val = None, None
        if cnf_file_current and cnf_file_previous:
            if file_only_name == 'globalVariables':
                _cnf_current, _global_val = ParserTool.parse_global_var(
                    cnf_file_current)
                _cnf_previous, _p_global_val = ParserTool.parse_global_var(
                    cnf_file_previous)
            else:
                _cnf_current = set(
                    ParserTool.parse_cnf_file(file_only_name,
                                              cnf_file_current))
                _cnf_previous = set(
                    ParserTool.parse_cnf_file(file_only_name,
                                              cnf_file_previous))
            # Bail out when parsing produced nothing usable. The original
            # required BOTH to be None ('and'), which crashed on
            # .difference() below when only one of them was None.
            if _cnf_current is None or _cnf_previous is None:
                release_note_creator.signal_no_cnf(file_only_name)
                return
            diff = _cnf_current.difference(_cnf_previous)
            if len(diff) == 0:
                return 0
            if file_only_name == 'globalVariables':
                # Also flag globals whose value changed between versions
                for key in _global_val:
                    if key not in diff and \
                            _global_val[key] != _p_global_val[key]:
                        diff.add(key)
                temp_g_key = list(diff)
                temp_g_val = {el: _global_val[el] for el in diff}
                release_note_creator.add_object_to_release_note(
                    file_only_name, server,
                    objects_key=temp_g_key, objects_value=temp_g_val)
            else:
                release_note_creator.add_object_to_release_note(
                    file_only_name, server, diff)
            # (redundant f.close() inside the with-block removed)
            with open(out_file, 'w') as f:
                for el in diff:
                    if el:
                        f.write(el + "\n")
        else:
            release_note_creator.signal_no_cnf(file_only_name)
            # str() guards against a None path crashing the log call
            logger.warning('there is one file missing check ' +
                           str(cnf_file_current) + ' | ' +
                           str(cnf_file_previous))
    except IOError as io_e:
        logger.warning("Error for cnf file " + cnf_file_current + " or " +
                       cnf_file_previous + " strerror" + str(io_e.strerror))
        sys.exit(-1)
def fill_list_file(self):
    """Build the CAF list file and release-note entries.

    Each CAF record is prefixed depending on whether it is an
    MWSUserManagement config or a portlet; records go to the list file,
    the CAF output list and the release-note collection.
    """
    logger.debug('Creating list file for %s' % self.data_type)
    caf_ds = sorted(Pt.get_data_type_set(
        self.log_file, self.caf_regex, self.caf_xpath,
        is_full=self.is_full))
    # all CAF records target the first (single) server
    caf_server = self.server_list[0]
    try:
        with open(self.list_file, "w") as list_file:
            for caf_record in caf_ds:
                # pick the header pair once, then build both strings
                if 'MWSUserManagement' in caf_record:
                    header = self.caf_cf_header
                    rel_header = self.caf_cf_release_header
                else:
                    header = self.caf_portlet_header
                    rel_header = self.caf_portlet_r_header
                # caf_record[1:] drops the record's leading character
                out_string = header + caf_record[1:] + ":" + caf_server
                out_rel_s = "%s \t %s \t %s" % (rel_header, caf_record[1:],
                                                caf_server)
                self.output_caf.append("%s\n" % out_string)
                self.release_note_all_objs.append("%s\n" % out_rel_s)
                list_file.write("%s\n" % out_string)
            # (redundant list_file.close() inside the with-block removed)
    except IOError:
        logger.error("IO ERROR check -> caf list file -> " + self.list_file)
def fill_list_file(self):
    """Build the SQL list file for database scripts.

    Scripts are bucketed into DDL/DML and forward/rollback, then written
    in the fixed order: ddl, dml, dml-rollback, ddl-rollback. Forward
    scripts get the 'bpm_sql' header, rollback scripts 'bpm_sql_rbck'.
    """
    logger.debug('Creating list file for %s' % self.data_type)
    db_res = sorted(Pt.get_data_type_set(
        self.log_file, self.db_regex, self.db_xpath, is_full=self.is_full))
    # bucket the scripts by kind (ddl/dml) and rollback flag;
    # entries matching neither 'ddl' nor 'dml' are dropped
    for db_el in db_res:
        db_el_lower = str(db_el).lower()
        if 'ddl' in db_el_lower:
            bucket = self.db_ddl_rbk if 'rollback' in db_el_lower \
                else self.db_ddl
            bucket.append(db_el)
        elif 'dml' in db_el_lower:
            bucket = self.db_dml_rbk if 'rollback' in db_el_lower \
                else self.db_dml
            bucket.append(db_el)
    sql_header = 'bpm_sql:'
    sql_header_r = 'bpm_sql'
    sql_rbk_header = 'bpm_sql_rbck:'
    sql_rbk_header_r = 'bpm_sql_rbck'
    if len(self.server_list) > 1:
        logger.warning(
            'Database have more than 1 server target...check it')
    server = ':' + self.server_list[0]
    server_r = self.server_list[0]
    # one loop over the four groups replaces four copy-pasted loops;
    # order preserved: ddl, dml, dml-rollback, ddl-rollback
    groups = [(self.db_ddl, sql_header, sql_header_r),
              (self.db_dml, sql_header, sql_header_r),
              (self.db_dml_rbk, sql_rbk_header, sql_rbk_header_r),
              (self.db_ddl_rbk, sql_rbk_header, sql_rbk_header_r)]
    # with-block closes the file even when an exception interrupts the
    # loop (the original bare open()/close() leaked on errors)
    with open(self.list_file, 'w') as list_file:
        for scripts, header, header_r in groups:
            for script in scripts:
                script = str(script)
                # strip a leading '/'; str.index raised ValueError when
                # no '/' was present at all -- startswith is safe
                if script.startswith('/'):
                    script = script[1:]
                el = header + script + server
                el_v = "%s \t %s \t %s" % (header_r, script, server_r)
                self.db_output.append("%s\n" % el)
                self.release_note_all_objs.append("%s\n" % el_v)
                list_file.write("%s\n" % el)
def fill_list_file(self):
    """Build the package list file, splitting default vs non-default servers."""
    logger.debug('Creating list file for %s' % self.data_type)
    try:
        with open(self.list_file, 'w') as list_file:
            pkg_set_from_log = sorted(Pt.get_data_type_set(
                self.log_file, self.packages_regex, self.packages_xpath,
                list_file, self.pkg_to_server, self.version_holder,
                is_full=self.is_full))
            for pkg in pkg_set_from_log:
                self.fill_server_packages(str(pkg))
            # Partition per server: 'bpm_is_default' packages go to the
            # default output list, everything else to the non-default one
            for server in self.server_list:
                for pkg in sorted(self.output_packages_by_server[server]):
                    pkg_el = "%s\n" % pkg
                    if server == 'bpm_is_default':
                        self.output_list_def.append(pkg_el)
                    else:
                        self.output_list_for_pkg_no_def.append(pkg_el)
                for el in sorted(
                        self.release_note_objects_by_server[server]):
                    self.release_note_all_objs.append("%s\n" % el)
            # (redundant list_file.close() inside the with-block removed)
    except IOError as e:
        logger.error('Problem occurs for output file for package %s '
                     % str(e))
def fill_list_file(self):
    """Build the process/project list file and release-note entries."""
    logger.debug('Creating list file for %s' % self.data_type)
    try:
        with open(self.list_file, 'w') as list_file:
            prj_set_from_log = sorted(Pt.get_data_type_set(
                self.log_file, self.bpm_projects_regex,
                self.bpm_projects_xpath, list_file, self.prj_to_server,
                self.version_holder, is_full=self.is_full))
            for prj in prj_set_from_log:
                self.fill_server_processes(str(prj))
            # Emit per-server, sorted, so output is deterministic
            for server in self.server_list:
                for prj_e in sorted(self.output_process_by_server[server]):
                    self.output_list.append("%s\n" % prj_e)
                for out_v in sorted(
                        self.release_note_objects_by_server[server]):
                    self.release_note_all_objs.append("%s\n" % out_v)
            # (redundant list_file.close() inside the with-block removed)
    except IOError as e:
        logger.error('Problem occurs for output file for processes %s '
                     % str(e))
def __init__(self, data_type, server_list, log_file, list_file, translator,
             base_url, is_full=False):
    """Set up project-specific state and immediately build the list file."""
    super(Process, self).__init__(data_type, server_list, log_file,
                                  list_file, is_full)
    # map each project onto its target server via the translator file
    self.translator_file = translator
    self.prj_to_server = Pt.parse_map_file(
        self.translator_file, 'target_project_server', 'project')
    logger.debug('Translator file for projects parsed correctly')
    # accumulators: per-server process sets and the flat output list
    self.output_process_by_server = defaultdict(set)
    self.output_list = []
    # version lookup is rooted under the bamProjects folder
    self.base_url = base_url + '/bamProjects/'
    self.version_holder = Vt.VersionHolder(
        self.base_url, package_flag=False, full_mode=self.is_full)
    self.fill_list_file()
def __init__(self, data_type, server_list, log_file, list_file, translator,
             base_url, is_full=False):
    """Set up package-specific state and immediately build the list file."""
    super(Package, self).__init__(data_type, server_list, log_file,
                                  list_file, is_full)
    # map each package onto its target server via the translator file
    self.translator_file = translator
    self.pkg_to_server = Pt.parse_map_file(
        self.translator_file, 'target_package_server', 'package')
    logger.debug('Translator file for package parsed correctly')
    # packages deployed outside the default server
    self.output_list_for_pkg_no_def = []
    # packages deployed on the default server
    self.output_list_def = []
    self.output_packages_by_server = defaultdict(set)
    # version lookup is rooted under the packages folder
    self.base_url = base_url + '/packages/'
    self.version_holder = Vt.VersionHolder(self.base_url,
                                           full_mode=self.is_full)
    self.fill_list_file()
def fill_list_file(self):
    """Build the Optimize configuration list file.

    Each element is normalised (leading '/' removed), prefixed with
    'bam_o4p_cnf' and targeted at the first server in the list.
    """
    logger.debug('Creating list file for %s' % self.data_type)
    optimize_res = sorted(Pt.get_data_type_set(
        self.log_file, self.optimize_regex, self.optimize_xpath,
        is_full=self.is_full))
    # with-block guarantees the file is closed even on error; the
    # original bare open() leaked when an exception was raised mid-loop
    with open(self.list_file, 'w') as _f:
        for el in optimize_res:
            el = str(el)
            # str.index raised ValueError when '/' was absent entirely;
            # startswith is the safe check for a leading slash
            if el.startswith('/'):
                el = el[1:]
            el_r = 'bam_o4p_cnf \t %s \t %s' % (el, self.server_list[0])
            el = 'bam_o4p_cnf:' + el + ':' + self.server_list[0]
            self.output_optimize.append("%s\n" % el)
            # append with trailing newline for consistency with every
            # other data type (the original omitted it here only)
            self.release_note_all_objs.append("%s\n" % el_r)
            _f.write("%s\n" % el)
def get_version_single_package(self, package):
    """Check out a package's manifest from SVN and return its version.

    The manifest is parsed for (version, dependencies, startup); in
    verbose mode the cached entry includes dependencies and startup
    services, otherwise just the version string. The checked-out
    manifest file is removed afterwards.

    :param package: package name (relative to ``self.url``)
    :return: the version string, or None when a RuntimeError occurs
    """
    try:
        Svn_Tool.check_out_file(str(self.url + package + self.manifest))
        (version, dep, startup) = ParserTool.parse_manifest(
            self.manifest_file, self.verbose)
        if self.verbose:
            self.versions[package] = \
                "%s \n Dependencies \n %s \n Startup: \n %s " % (
                    version, dep, startup)
        else:
            self.versions[package] = version
        # clean up the checked-out manifest copy
        if os.path.exists(self.manifest_file):
            os.remove(self.manifest_file)
        else:
            # message typo fixed: was "doesn't exits"
            logger.warning("Manifest for %s doesn't exist" % package)
        return version
    except RuntimeError:
        logger.error('Error occurs while getting version for package %s '
                     % package)
def get_runtime_version(self, process):
    """Check out a .process file from SVN and return its runtime version.

    Appends the '.process' extension when missing, downloads the file
    into the current working directory, parses the runtime version and
    removes the local copy.

    :param process: process path, with or without the '.process' extension
    :return: the runtime version, or None when it cannot be extracted
             or a RuntimeError occurs
    """
    try:
        _process = str(process)
        if not _process.endswith('.process'):
            _process += '.process'
        Svn_Tool.check_out_file(self.url + _process)
        process_xml = _process.split('/')[-1]
        process_file = os.path.join(os.getcwd(), process_xml)
        runtime_version = ParserTool.parse_process_file(process_file)
        # Always remove the checked-out copy; the original deleted it
        # only when a version was found, leaking the file otherwise.
        if os.path.exists(process_file):
            os.remove(process_file)
        if runtime_version is not None:
            self.versions[_process] = runtime_version
            return runtime_version
        logger.warning(
            "Runtime version not extracted from this file %s",
            str(_process))
    except RuntimeError:
        logger.error('Error occurs while getting version for process %s '
                     % process)
_parameters[bpm_p_point] = previous_point _parameters[artf_list] = artifact_list logger.debug( "Requested Delta mode on current_point: %s previous_point: %s for target_tag: %s and artifact_list %s" % (str(current_point), str(previous_point), str(_target_tag), str(artifact_list))) return False, _env, _parameters, _target_tag, _mode if __name__ == '__main__': config_file = os.path.abspath( os.path.join(os.path.dirname(__file__), '.', 'config/properties.conf')) log_config_file = os.path.abspath( os.path.join(os.path.dirname(__file__), '.', 'config/logger.conf')) # Parse config file conf = ParserTool.parse_config_file(config_file) init_log_from_file(log_config_file) # Starting parser is_a_full, env, parameters, target_tag, mode = start_parser() try: if mode == 'bam': svn_link = conf[bam_root] + parameters[bpm_c_point] else: svn_link = conf[bpm_root] + parameters[bpm_c_point] logger.debug("Checking link for current point for bpm %s " % svn_link) err_msg = SvnTool.check_svn_url(svn_link) if not err_msg: if mode == 'bpm': if is_a_full: logger.debug('Passed...Go Ahead')