def __run_sp_ra(logger, q, pgsql_conf, tmode=True):
    """Runs a stored procedure with a rich (rc, msg) answer"""
    def run_store():
        logger.debug("Performing query: {}".format(q))
        r = HelperPg.onfly_query(pgsql_conf, q, True)
        # For this case we are just expecting one row
        if len(r) != 1:
            raise Exception('unexpected result regarding execution of store')
        return r

    def check_result(r):
        rcode, rmsg = r.pop()
        if rcode != 0:
            raise Exception(rmsg)

    _res = None
    try:
        _res = run_store()
    except:
        logger.error(dump_exception())
        return ErrorCode.DBMS_SQL_ISSUES

    try:
        check_result(_res)
    except:
        logger.error(dump_exception())
        if tmode:
            return ErrorCode.DBMS_TRANS_ERROR
        else:
            return ErrorCode.REQUEST_INVALID

    return ErrorCode.SUCCESS
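# Hypothetical usage sketch (not part of the original module): it only
# illustrates the calling convention of __run_sp_ra above. The stored
# procedure name ncr_cancel is an assumption; the pt.dbms.pgsql_conn
# attribute is modeled on the other handlers in this file.
def _example_cancel_ncr(logger, pt, ncr_id, usr_id):
    q = """select * from ncr_cancel(
        {}::integer,
        {}::integer
    ) AS result( rc integer, msg text )""".format(ncr_id, usr_id)
    # tmode=True maps a non-zero rc to DBMS_TRANS_ERROR instead of REQUEST_INVALID
    return __run_sp_ra(logger, q, pt.dbms.pgsql_conn, tmode=True)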
def __pac_sign(logger, f_xmlin, xid, out_dir, pac_conf):
    """Signs an xml with the pac connector mechanism"""
    try:
        logger.debug('Getting a pac connector as per config profile')
        pac, err = setup_pac(logger, pac_conf)
        if pac is None:
            raise Exception(err)

        logger.debug('File to sign {}'.format(f_xmlin))
        s_signed = None
        with open(f_xmlin) as f:
            s_signed = pac.stamp(f.read(), xid)
        logger.debug(s_signed)

        f_xmlout = os.path.join(out_dir, xid)
        logger.debug('saving pac-signed xml to {}'.format(f_xmlout))
        with open(f_xmlout, "w") as f:
            f.write(s_signed)

        return ErrorCode.SUCCESS, f_xmlout
    except:
        logger.error(dump_exception())
        return ErrorCode.THIRD_PARTY_ISSUES, None
def store(f_xml):
    parser = SaxReader()
    xml_dat, _ = parser(f_xml)

    q = """select * from ncr_save_xml(
        {}::integer,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::boolean,
        {}::integer
    ) AS result( rc integer, msg text )""".format(
        # Stored procedure parameters
        req.get('ncr_id', None),        # _ncr_id
        os.path.basename(f_xml),        # _file_xml
        xml_dat['CFDI_SERIE'],          # _serie
        xml_dat['CFDI_FOLIO'],          # _folio
        req.get('saldado', None),       # _saldado
        req.get('usr_id', None)         # _usr_id
    )

    logger.debug("Performing query: {}".format(q))
    try:
        res = HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True)
        # Just one (rc, msg) row is expected back from the stored procedure
        if len(res) != 1:
            raise Exception('unexpected result regarding execution of store')
        rcode, rmsg = res.pop()
        if rcode == 0:
            return ErrorCode.SUCCESS
        raise Exception(rmsg)
    except:
        logger.error(dump_exception())
        return ErrorCode.DBMS_SQL_ISSUES
def do_request(logger, pt, req, adapter=None):
    """Dispatches a request to the business module and handler it names"""
    def apply_adapter():
        if adapter is not None:
            return adapter()
        # Otherwise we assume the request is the bytes of a json string
        json_lines = req.decode(encoding='UTF-8')
        return json.loads(json_lines)

    d = apply_adapter()
    try:
        business_mod = d['request']['to']
        action = d['request']['action']
        args = d['request']['args']

        m = __import__(business_mod)
        if not hasattr(m, action):
            msg = "module {0} has no handler {1}".format(business_mod, action)
            raise RuntimeError(msg)
        handler = getattr(m, action)
        return handler(logger, pt, args)
    except (ImportError, RuntimeError) as e:
        logger.fatal("support module failure {}".format(e))
        return ErrorCode.MOD_BUSINESS_NOT_LOADED.value
    except:
        logger.error(dump_exception())
        return ErrorCode.MOD_BUSINESS_UNEXPECTED_FAIL.value
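# Hypothetical request envelope accepted by do_request (a sketch inferred from
# the keys read above; the module and handler names are illustrative only):
#
#   payload = {
#       "request": {
#           "to": "prefact",            # business module to import
#           "action": "do_create",      # handler looked up inside that module
#           "args": {"usr_id": 1}       # forwarded untouched to the handler
#       }
#   }
#   rc = do_request(logger, pt, json.dumps(payload).encode('UTF-8'))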
def __run_builder(logger, pt, f_outdoc, resdir, dm_builder, **kwargs):
    try:
        dpl = DocPipeLine(logger, resdir, rdirs_conf=pt.res.dirs)
        dpl.run(dm_builder, f_outdoc, **kwargs)
        return ErrorCode.SUCCESS
    except:
        logger.error(dump_exception())
        return ErrorCode.DOCMAKER_ERROR
def store(f_xmlin, usr_id, prefact_id, no_id):
    parser = SaxReader()
    xml_dat, _ = parser(f_xmlin)

    ref_id = '{}_{}{}'.format(
        no_id, xml_dat['CFDI_SERIE'], xml_dat['CFDI_FOLIO'])

    q = """select fac_save_xml from fac_save_xml(
        '{}'::character varying,
        {}::integer,
        {}::integer,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        '{}'::character varying,
        {}::double precision,
        {}::double precision,
        {}::double precision,
        {}::boolean,
        '{}'::character varying
    )""".format(
        # Stored procedure parameters
        os.path.basename(f_xmlin),            # file_xml
        prefact_id,                           # prefact_id
        usr_id,                               # usr_id
        xml_dat['CFDI_DATE'].split('T')[0],   # creation_date
        ref_id,                               # no_id_emp
        xml_dat['CFDI_SERIE'],                # serie
        xml_dat['CFDI_FOLIO'],                # folio
        'THIS FIELD IS DEPRECATED',           # items_str
        'THIS FIELD IS DEPRECATED',           # traslados_str
        'THIS FIELD IS DEPRECATED',           # retenciones_str
        xml_dat['INCEPTOR_REG'],              # reg_fiscal
        'THIS FIELD IS DEPRECATED',           # pay_method
        xml_dat['INCEPTOR_CP'],               # exp_place
        'FACTURA',                            # proposito - hardcoded on purpose
        'THIS FIELD IS DEPRECATED',           # no_aprob
        'THIS FIELD IS DEPRECATED',           # ano_aprob
        xml_dat['RECEPTOR_RFC'],              # rfc_custm - customer RFC
        xml_dat['RECEPTOR_NAME'],             # rs_custm - customer razon social
        '0000',                               # account_number - fake placeholder account number
        xml_dat['TAXES']['TRAS']['TOTAL'],    # total_tras
        '0',                                  # subtotal_with_desc
        xml_dat['CFDI_TOTAL'],                # total
        'false',                              # refact
        xml_dat['UUID']                       # document id - issued by SAT through the PAC stamping
    )

    logger.debug("Performing query: {}".format(q))
    try:
        s_out = None
        for row in HelperPg.onfly_query(q, True):
            # Just taking the first row of the query result
            s_out = row['fac_save_xml']
            break
        # Here we should parse the s_out line
        logger.debug(
            'store procedure fac_save_xml has returned: {}'.format(s_out))
        return ErrorCode.SUCCESS
    except:
        logger.error(dump_exception())
        return ErrorCode.ETL_ISSUES
def store(f):
    try:
        # Here would be placed the code for saving
        # the relevant info of the newer cfdi in the dbms
        logger.debug('saving relevant info of {} in dbms'.format(f))
        return ErrorCode.SUCCESS
    except:
        logger.error(dump_exception())
        return ErrorCode.ETL_ISSUES
def update_filename():
    q = """UPDATE erp_pagos SET aux_no_fac = '{}'
           WHERE erp_pagos.numero_transaccion = {}""".format(
        filename.replace('.xml', ''), pag_id)
    try:
        HelperPg.onfly_update(pt.dbms.pgsql_conn, q)
    except:
        logger.error(dump_exception())
        return ErrorCode.DBMS_SQL_ISSUES
    return ErrorCode.SUCCESS
def dm_exec(filename, resdir, usr_id, prefact_id):
    dm_builder = 'facxml'
    kwargs = {'usr_id': usr_id, 'prefact_id': prefact_id}
    try:
        dpl = DocPipeLine(logger, resdir,
                          rdirs_conf=pt.res.dirs,
                          pgsql_conf=pt.dbms.pgsql_conn)
        dpl.run(dm_builder, filename, **kwargs)
        return ErrorCode.SUCCESS
    except:
        logger.error(dump_exception())
        return ErrorCode.DOCMAKER_ERROR
def __pac_cancel(logger, t, rfc, pac_conf):
    try:
        logger.debug('Getting a pac connector as per config profile')
        pac, err = setup_pac(logger, pac_conf)
        if pac is None:
            raise Exception(err)
        s_cancel = pac.cancel(t, rfc)
        logger.debug(s_cancel)
        return ErrorCode.SUCCESS
    except:
        logger.error(dump_exception())
        return ErrorCode.THIRD_PARTY_ISSUES
def fetch_empdat(usr_id):
    sql = """select upper(EMP.rfc) as rfc, EMP.no_id as no_id
             FROM gral_suc AS SUC
             LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
             LEFT JOIN gral_emp AS EMP ON EMP.id = SUC.empresa_id
             WHERE USR_SUC.gral_usr_id="""
    q = "{0}{1}".format(sql, usr_id)
    logger.debug("Performing query: {}".format(q))
    try:
        for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q):
            # Just the first row is relevant
            return ErrorCode.SUCCESS, dict(rfc=row['rfc'], no_id=row['no_id'])
        # No row came back: fail explicitly instead of falling through
        # and implicitly returning None
        raise Exception('no company data found for user {}'.format(usr_id))
    except:
        logger.error(dump_exception())
        return ErrorCode.DBMS_SQL_ISSUES, None
def conn_delegate(self, conns_queue, profile_path, queue, configurer, debug):
    """deals with an active connection"""
    configurer(queue, debug)
    name = multiprocessing.current_process().name
    logger = logging.getLogger(name)
    conn = None

    def read_socket(s):
        d = conn.recv(s)
        if d == b'':
            raise RuntimeError("socket connection broken")
        return d

    read_header = lambda: read_socket(Frame.FRAME_HEADER_LENGTH)
    read_body = lambda hs: read_socket(hs)

    while True:
        try:
            conn = conns_queue.get()
            logger.debug('Taking a connection from queue with {}'.format(
                conns_queue.qsize()))
            logger.debug(
                'File descriptor for this connection is {}'.format(
                    conn.fileno()))
            factory = ControllerFactory(logger, profile_path)
            mon = Monitor(logger, conn, factory)
            logger.debug("Monitor is ready")
            while True:
                mon.receive(
                    Action(read_body(Frame.decode_header(read_header()))))
        except RuntimeError as e:
            logger.info(e)
        except FrameError as e:
            logger.exception(e)
        except KeyboardInterrupt:
            # SIGINT is masked in the child processes.
            # That's why this workaround is required
            # to exit reliably.
            logger.debug('Finishing worker {}'.format(name))
            break
        except:
            logger.error(dump_exception())
        finally:
            if conn is not None:
                logger.debug("Closing socket")
                conn.close()
                conns_queue.task_done()
def pac_sign(f, resdir):
    try:
        # Here would be placed the code calling
        # the pac connector mechanism
        logger.debug('Getting a pac connector as per config profile')
        pac, err = setup_pac(logger, pt.tparty.pac)
        if pac is None:
            raise Exception(err)
        logger.debug('File to sign {}'.format(f))
        with open(f) as t:
            signed = pac.stamp(t.read(), 'HARD_XID')
        logger.debug(signed)
        return ErrorCode.SUCCESS, f
    except:
        logger.error(dump_exception())
        # Return a pair on failure too, matching the success path
        return ErrorCode.THIRD_PARTY_ISSUES, None
def update_consecutive_alpha(f_xmlin):
    parser = SaxReader()
    xml_dat, _ = parser(f_xmlin)

    q = """update fac_cfds_conf_folios set folio_actual = (folio_actual + 1)
           FROM gral_suc AS SUC
           LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id
           LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
           WHERE fac_cfds_conf_folios.proposito = 'PAG'
           AND fac_cfds_conf_folios.fac_cfds_conf_id=fac_cfds_conf.id
           AND USR_SUC.gral_usr_id = {}""".format(usr_id)
    try:
        HelperPg.onfly_update(q)
    except:
        logger.error(dump_exception())
        return ErrorCode.DBMS_SQL_ISSUES
    return ErrorCode.SUCCESS
    return psr.parse_args()


if __name__ == "__main__":

    args = parse_cmdline()

    RESOURCES_DIR = '{}/resources'.format(expanduser("~"))
    PROFILES_DIR = '{}/profiles'.format(RESOURCES_DIR)
    DEFAULT_PROFILE = 'default.json'

    profile_path = '{}/{}'.format(
        PROFILES_DIR, args.config if args.config else DEFAULT_PROFILE)

    logger = setup_logger(args.debug)
    logger.debug(args)

    try:
        pt = read_settings(logger, profile_path)
        incept_prefact(logger, pt, args.user_id, args.cust_id)
        logger.debug('successful super prefact execution')
    except:
        if args.debug:
            print('Whoops! a problem came up!')
            print(dump_exception())
        sys.exit(1)

    # assuming everything went right, exit gracefully
    sys.exit(0)