def process_file(configfile, inputfile, outputfile, compressiontype, complevel, ciphertype, testall, append, pagesingroups):
    """Benchmark compression/cipher combinations over a spool file.

    For every requested (compression, cipher) pair the input spool is written
    to a fresh ``.oerm`` database, then read back in full, and the resulting
    sizes and throughput figures are collected into a tabulated summary.

    Args:
        configfile (str): ReportMatcher configuration file.
        inputfile (str): Spool file to process.
        outputfile (str): Base name for the generated ``.oerm`` files.
        compressiontype (int): Compression algorithm id to test.
        complevel (int): Compression level passed to the block/compressor.
        ciphertype (int): Cipher algorithm id to test.
        testall (str): If it contains ``'c'`` and/or ``'e'``, every available
            compression and/or cipher type is tested instead of the single
            requested one.
        append (bool): Open the output databases in append ("ab") mode
            instead of write ("wb") mode.
        pagesingroups (int): Pages stored per container.

    Returns:
        str: tabulate()-formatted results table.
    """
    block = Block(default_compress_level=complevel)
    resultados = []
    size_test_file = os.path.getsize(inputfile)

    # Narrow the available types down to the requested ones...
    compresiones = [e for e in block.compressor.available_types if e[0] == compressiontype]
    encriptados = [e for e in block.cipher.available_types if e[0] == ciphertype]
    # ...unless an exhaustive test was asked for.
    if testall:
        if 'e' in testall:
            encriptados = block.cipher.available_types
        if 'c' in testall:
            compresiones = block.compressor.available_types

    mode = "ab" if append else "wb"
    r = ReportMatcher(configfile)
    for encriptado in encriptados:
        for compress in compresiones:
            print("Procesando: {2} Compresión: [{0}] {1} Cifrado: {3}".format(compress[0], compress[1], inputfile, encriptado[1]))
            start = time.time()
            paginas = 0
            # One output file per (compression, cipher) combination.
            file_name = "{0}.{1}.oerm".format(outputfile, slugify("{0}.{1}".format(compress[1], encriptado[1]), "_"))
            db = Database(file=file_name,
                          mode=mode,
                          default_compress_method=compress[0],
                          default_compress_level=complevel,
                          default_encription_method=encriptado[0],
                          pages_in_container=pagesingroups)
            reportname_anterior = ""
            # Fixed-record spool reader; "NEVADO" is the new-page marker.
            # (A SpoolHostReprint reader with Latin1 encoding was used here
            # in a previous revision.)
            spool = SpoolFixedRecordLength(inputfile, buffer_size=102400, encoding="cp500", newpage_code="NEVADO")
            with spool as s:
                for page in s:
                    data = r.match(page)
                    reportname = data[0]
                    if reportname != reportname_anterior:
                        # First page of a new report: register it.
                        db.add_report(reporte=reportname, sistema=data[1], aplicacion=data[2], departamento=data[3], fecha=data[4])
                        reportname_anterior = reportname
                    paginas = paginas + 1
                    db.add_page(page)
            db.close()
            compress_time = time.time() - start
            compress_size = os.path.getsize(file_name)

            # Read the whole file back to measure decompression throughput.
            start = time.time()
            db = Database(file=file_name, mode="rb")
            for report in db.reports():
                try:
                    for page in report:
                        pass
                except Exception as err:
                    print("Error: {0} al descomprimir reporte".format(err))
                    break
            uncompress_time = time.time() - start
            # Average stored block size (containers + metadata objects).
            container_size = compress_size / (db.Index.container_objects + db.Index.metadata_objects)
            db.close()  # fix: the read-back database handle was previously leaked

            resultados.append([
                "[{0}] {1} ({2}p/cont.)".format(compress[0], compress[1], pagesingroups),
                ("" if encriptado[0] == 0 else encriptado[1]),
                float(size_test_file),
                float(compress_size),
                (compress_size / size_test_file) * 100,
                paginas / compress_time,
                paginas / uncompress_time,
                container_size
            ])

    tablestr = tabulate(
        tabular_data=resultados,
        headers=["Algoritmo", "Encript.", "Real (bytes)", "Compr. (bytes)",
                 "Ratio", "Compr. Pg/Seg", "Descompr. Pg/Seg", "BSize (Prom.)"],
        floatfmt="8.2f",
        tablefmt="psql",
        numalign="right",
        stralign="left",
        # fix: one format spec per column (the original listed 9 for 8 columns)
        override_cols_fmt=[None, None, ",.0f", ",.0f", ",.2f", ",.2f", ",.2f", ",.2f"]
    )
    return tablestr
            # (fragment — the enclosing function/loop header lies outside this
            # chunk; `item`, `encriptado`, `paginas`, `start`, `file_name` and
            # `resultados` are defined upstream — TODO confirm against caller)
            compress_time = time.time() - start
            compress_size = os.path.getsize(file_name)

            # Read the file back to time decompression.
            start = time.time()
            db = Database(file=file_name, mode="rb")
            for report in db.reports():
                for page in report:
                    pass
            uncompress_time = time.time() - start

            # One summary row per tested combination.
            resultados.append([
                "[{0}] {1}".format(item[0], item[1]),
                ("" if encriptado == 0 else "Si"),
                size_test_file,
                compress_size,
                (compress_size / size_test_file) * 100,
                compress_time,
                paginas / compress_time,
                uncompress_time,
                paginas / uncompress_time
            ])

        # Final summary table (9 columns matching the 9 row fields above).
        tablestr = tabulate(tabular_data=resultados,
                            headers=["Algoritmo", "Encriptado", "Real (bytes)",
                                     "Comprimido (bytes)", "Ratio", "Compr. seg.",
                                     "Pag/Seg", "Descompr. seg.", "Pag/Seg"],
                            floatfmt="8.2f",
                            tablefmt="psql",
                            numalign="right",
                            stralign="left")
        print("")
        print(tablestr)
        print("")
            # (fragment — the `if`/`for` headers enclosing these first lines are
            # outside this chunk; `pg` looks like a decoded page group and
            # `tipo_bloque`/`tipo_compresion` were read just above — TODO confirm)
            paginas += pg.max_page_count
            # Per-block-type key, including the page count for page blocks.
            k = "{0}. {1} comprimido con {2} (páginas: {3})".format(tipo_bloque, b.block_types[tipo_bloque], b.compressor.available_types[tipo_compresion][1], pg.max_page_count)
        else:
            # Non-page block: same key without the page count.
            k = "{0}. {1} comprimido con {2}".format(tipo_bloque, b.block_types[tipo_bloque], b.compressor.available_types[tipo_compresion][1])
        # Running totals per block type: (total bytes, block count).
        totales[k] = tuple(map(lambda x, y: x + y, totales.get(k, (0, 0)), (longitud_bloque, 1)))

    elapsed = time.time() - start
    print("")
    print("Archivo : {0}".format(test_file))
    print("Tamaño en bytes : {:,.0f}".format(size_test_file))
    print("Total de paginas : {:,.0f}".format(paginas))
    print("Tiempo de lectura: {:,.2f}".format(elapsed))
    print("Tiempo por pag. : {:,.8f}".format(elapsed/paginas))
    print("Páginas/Segundos : {:,.0f}".format(paginas/elapsed))

    # (key, total bytes, count, average bytes per block) rows for the summary.
    lista = [(v[0], v[1][0], v[1][1], v[1][0]/v[1][1]) for v in totales.items()]
    if lista:
        tablestr = tabulate(
            tabular_data=lista,
            headers=["Tipo bloque", "Tamaño total", "Cantidad", "Bytes promedio por bloque"],
            floatfmt=",.2f",
            tablefmt="psql",
            numalign="right",
            stralign="left",
            override_cols_fmt=[None, ",.0f", ",.0f", ",.2f"]
        )
        print("")
        print(tablestr)
        print("")
# Discover and run the project's unit tests, then print a status table.
testRunner = CustomTestRunner()

# FIXME(review): `len(sys.argv) < 1` can never be true (argv always contains
# at least the script name), so the pattern branch is dead and `sys.argv[1]`
# would raise IndexError anyway; `> 1` was almost certainly intended.
# Left unchanged in this documentation-only pass.
if len(sys.argv) < 1:
    test_pattern = sys.argv[1]
    testsuite = unittest.TestLoader().discover(start_dir='.', pattern=test_pattern)
else:
    testsuite = unittest.TestLoader().discover(start_dir='.')

results = testRunner.run(testsuite)

# One row per executed test: (#, id, name, elapsed seconds, status).
tablestr = tabulate(tabular_data=[(r[0], r[1], r[2], r[3], r[4]) for r in results.resultados],
                    headers=["#", "Id", "Test", "Tiempo (segs)", "Status"],
                    floatfmt="8.2f",
                    tablefmt="psql",
                    numalign="right",
                    stralign="left")
print("")
print("Estatus de los tests:")
print("=====================")
print("")
print(tablestr)
print("")

# Tests whose 6th field (error info) is truthy are failures/errors.
# FIXME(review): the comprehension below duplicates `errores` — the loop
# could simply iterate `errores`.  (Loop body continues past this chunk.)
errores = [(r[0], r[1], r[2], r[3], r[4], r[5]) for r in results.resultados if r[5]]
lista = []
for error in [(r[0], r[1], r[2], r[3], r[4], r[5]) for r in results.resultados if r[5]]:
# Listar reportes en la base oerm if args.listreports or (not args.showpages and not args.searchtext): reports_list = [] for report in d.reports(): reports_list.append((report.id, report.nombre, report.total_pages)) if reports_list: print("") print("Archivo : {0}".format(filename)) print("Reportes : {0}".format(len(reports_list))) print("Páginas : {0}".format(sum([e[2] for e in reports_list]))) print("") tablestr = tabulate(tabular_data=reports_list, headers=["Reporte", "Nombre", "Páginas"], floatfmt=",.2f", tablefmt="psql", numalign="right", stralign="left", override_cols_fmt=[None, None, ",.0f", None]) print(tablestr) print("") d.close() sys.exit(0) # Consultar un reporte en particular if args.reportid: report = d.reports().get_report(int(args.reportid)) if args.showpages:
def query_reports(self, reporte=None, sistema=None, aplicacion=None, departamento=None, fecha=None, limit=None, returntype="list"):
    """Basic query to look up reports across the catalog's repositories.

    The search matches any of a report's basic attributes, using SQL
    ``LIKE`` semantics so partial matches work (``None`` means "any").

    Args:
        reporte (str): Report name (partial match).
        sistema (str): System name (partial match).
        aplicacion (str): Application name.  TODO(review): accepted for
            interface compatibility but not filtered — the visible schema
            has no application table to join against.
        departamento (str): Department name (partial match).
        fecha (str): Report emission date (partial match).
        limit (int): Maximum number of results to return (None = no limit).
        returntype (str): ``"list"`` for raw rows, ``"tablestr"`` for a
            tabulate()-formatted table.

    Returns:
        list/str: Rows of (report name, date, department, system, pages,
        database path), or their tabulated string representation.

    Example:
        >>> from openerm.OermClient import OermClient
        >>> c = OermClient("samples/openermcfg.yaml")
        >>> c.open_catalog("local-test")
        >>> c.open_repo("Prueba1")
        >>> resultados = c.query_reports(reporte="Carta", returntype="tablestr")
        >>> print(resultados)
    """
    lista = []
    # fix: the original computed the `sistema`/`departamento` wildcards but
    # never bound them in the WHERE clause, so those filters were silently
    # ignored; both predicates are now part of the query.
    SQL = """
        Select distinct report.report_name,
            date.date,
            department.department_name,
            system.system_name,
            reports.pages,
            databases.path
        From reports
        Inner Join department On reports.department_id = department.department_id
        Inner Join date On reports.date_id = date.date_id
        Inner Join report On reports.report_id = report.report_id
        Inner Join system On reports.system_id = system.system_id
        Inner Join databases On reports.database_id = databases.database_id
        where 1 = 1
        and report.report_name like ?
        and date.date like ?
        and department.department_name like ?
        and system.system_name like ?
    """
    reporte = '%' if reporte is None else '%' + reporte + '%'
    sistema = '%' if sistema is None else '%' + sistema + '%'
    departamento = '%' if departamento is None else '%' + departamento + '%'
    fecha = '%' if fecha is None else '%' + fecha + '%'

    # The current repository maps names to sqlite database paths; each one
    # is queried and the results are accumulated.
    for dbname in self._current_repo.values():
        conn = sqlite3.connect(dbname)
        c = conn.cursor()
        c.execute(SQL, (reporte, fecha, departamento, sistema))
        lista.extend(c.fetchall())
        c.close()
        conn.close()

    # fix: `limit` was accepted but never applied.
    if limit is not None:
        lista = lista[:limit]

    if returntype == "tablestr":
        # fix: headers now match the 6 selected columns (the original listed
        # a 7th "Aplicación" header with no corresponding column).
        tablestr = tabulate(
            tabular_data=lista,
            headers=["Nombre", "Fecha", "Departamento", "Sistema", "Páginas", "Path"],
            floatfmt="8.2f",
            tablefmt="psql",
            numalign="right",
            stralign="left",
            override_cols_fmt=[None, None, None, None, ",.0f", None])
        return tablestr
    return lista
def process_file(self, input_file: str) -> str:
    """Compress an input spool file into an .oerm database with a progress bar.

    For each configured (compression, cipher) combination the spool is read
    page by page, matched against the report configuration, and appended to
    an output database; a tabulated summary of sizes and throughput is
    returned.  (In practice the config selects a single combination, so the
    nested loops usually run once.)

    Args:
        input_file: Path of the spool file to process.

    Returns:
        tabulate()-formatted results table (one row per combination).
    """
    block = Block(default_compress_level=self.config.compress_level)
    resultados = []
    self.input_file = input_file
    size_test_file = os.path.getsize(self.input_file)
    # Available spool readers, keyed by the configured file type.
    self.spool_types = {
        "fixed": SpoolFixedRecordLength(self.input_file, buffer_size=self.config.buffer_size, encoding=self.config.encoding, newpage_code=self.config.EOP),
        "fcfc": SpoolHostReprint(self.input_file, buffer_size=self.config.buffer_size, encoding=self.config.encoding)
    }
    # Narrow available types to the configured compression/cipher ids.
    compresiones = [
        e for e in block.compressor.available_types
        if e[0] == self.config.compress_type
    ]
    encriptados = [
        e for e in block.cipher.available_types
        if e[0] == self.config.cipher_type
    ]
    mode = "ab"  # always append: output files are accumulated across runs
    r = ReportMatcher(self.config.report_cfg)
    reports = []  # distinct report names seen, for the summary count
    for encriptado in encriptados:
        for compress in compresiones:
            start = time.time()
            paginas = 0
            # file_name = "{0}.{1}.oerm".format(self.config.output_path, slugify("{0}.{1}".format(compress[1], encriptado[1]), "_"))
            file_name = os.path.join(
                self.config.output_path,
                generate_filename(self.config.file_mask) + ".oerm")
            db = Database(
                file=file_name,
                mode=mode,
                default_compress_method=compress[0],
                default_compress_level=self.config.compress_level,
                default_encription_method=encriptado[0],
                pages_in_container=self.config.pages_in_group)
            # Size before writing: since the file is opened in append mode,
            # the bytes added by this run are measured as a delta (below).
            file_size = os.path.getsize(file_name)
            reportname_anterior = ""
            widgets = [
                os.path.basename(self.input_file), ': ',
                FormatLabel(
                    '%(value)d bytes de %(max_value)d (%(percentage)0.2f)'
                ),
                Bar(marker='#', left='[', right=']'),
                ' ',
                ETA(),
                ' ',
                FileTransferSpeed()
            ]
            p_size = 0  # bytes consumed so far, drives the progress bar
            with ProgressBar(max_value=size_test_file, widgets=widgets) as bar:
                spool = self.spool_types[self.config.file_type]
                with spool as s:
                    for page in s:
                        p_size += len(page)
                        bar.update(p_size)
                        # Classify the page; data[0] is the report name.
                        data = r.match(page)
                        reportname = data[0]
                        if reportname not in reports:
                            reports.append(reportname)
                        if reportname != reportname_anterior:
                            # Report boundary: reuse the existing report in
                            # the database if present, otherwise create it.
                            rpt_id = db.get_report(reportname)
                            if rpt_id:
                                db.set_report(reportname)
                            else:
                                # NOTE(review): here data[2]/data[3] feed
                                # departamento/fecha, while the L1 variant of
                                # this flow also passed aplicacion — confirm
                                # the ReportMatcher tuple layout.
                                db.add_report(reporte=reportname,
                                              sistema=data[1],
                                              departamento=data[2],
                                              fecha=data[3])
                            reportname_anterior = reportname
                        paginas = paginas + 1
                        db.add_page(page)
            db.close()
            compress_time = time.time() - start
            # Bytes added by this run only (file may have pre-existing data).
            compress_size = os.path.getsize(file_name) - file_size
            resultados.append([
                "[{0}] {1} ({2}p/cont.)".format(
                    compress[0],
                    compress[1],
                    self.config.pages_in_group),
                ("" if encriptado[0] == 0 else encriptado[1]),
                float(size_test_file),
                float(compress_size),
                (compress_size / size_test_file) * 100,
                paginas / compress_time,
                len(reports)
            ])
    # Summary table: one row per tested combination, 7 columns.
    tablestr = tabulate(tabular_data=resultados,
                        headers=["Algoritmo", "Encript.", "Real (bytes)",
                                 "Compr. (bytes)", "Ratio", "Compr. Pg/Seg",
                                 "Reportes"],
                        floatfmt="8.2f",
                        tablefmt="psql",
                        numalign="right",
                        stralign="left",
                        override_cols_fmt=[
                            None, None, ",.0f", ",.0f", ",.2f", ",.2f", ",.2f"
                        ])
    return tablestr