def setup_pac(logger, conf):
    """Set a pac adapter up as per configuration object.

    Returns a ``(adapter_instance, None)`` tuple on success, or
    ``(None, error_message)`` when the configured pac name/mode is
    not supported.
    """
    support = {
        # Here you should subscribe any newer
        # adapter implementation (AKA pac adapter)
        'servisim': dict(test=(Servisim, conf.test), real=(Servisim, conf.real))
    }
    name = ProfileReader.get_content(conf.name, ProfileReader.PNODE_UNIQUE)
    mode = ProfileReader.get_content(conf.mode, ProfileReader.PNODE_UNIQUE)
    supplier = support.get(name.lower(), None)
    msg = None
    if supplier is not None:
        try:
            # Each supplier entry maps mode -> (adapter class, settings node).
            ic, settings = supplier[mode]
            # Instantiate the adapter, expanding the profile's param/value
            # pairs into keyword arguments; msg is still None here.
            return ic(
                logger,
                **dict_params(
                    ProfileReader.get_content(settings, ProfileReader.PNODE_MANY),
                    "param", "value")), msg
        except KeyError:
            # Unknown mode key for an otherwise known pac.
            msg = "Such pac mode is not supported"
    else:
        msg = "Such pac is not supported yet"
    return None, msg
def connect(conf):
    """Open and return a psycopg2 connection built from the config profile."""
    read = ProfileReader.get_content
    unique = ProfileReader.PNODE_UNIQUE
    # Assemble the libpq-style connection string piece by piece.
    parts = [
        "dbname={0}".format(read(conf.db, unique)),
        "user={0}".format(read(conf.user, unique)),
        "host={0}".format(read(conf.host, unique)),
        "password={0}".format(read(conf.passwd, unique)),
        "port={0}".format(read(conf.port, unique)),
    ]
    return psycopg2.connect(" ".join(parts))
def connect(conf):
    """Open a connection to the database described by the config profile.

    :param conf: profile node carrying db/user/host/passwd/port children
    :returns: an open psycopg2 connection
    :raises Exception: when the profile is incomplete or the dbms refuses
        the connection (original cause is logged via the raised context)
    """
    try:
        conn_str = "dbname={0} user={1} host={2} password={3} port={4}".format(
            ProfileReader.get_content(conf.db, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(conf.user, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(conf.host, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(conf.passwd, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(conf.port, ProfileReader.PNODE_UNIQUE)
        )
        return psycopg2.connect(conn_str)
    except Exception:
        # FIX: the original bare ``except:`` also trapped SystemExit and
        # KeyboardInterrupt; narrow to Exception while keeping the same
        # message callers rely on (implicit exception chaining preserves
        # the original cause in the traceback).
        raise Exception('It is not possible to connect with database')
def read_settings(s_file):
    """Parse the config profile file at *s_file*; raise if it does not exist."""
    logger.debug("looking for config profile file in:\n{0}".format(
        os.path.abspath(s_file)))
    # Guard clause: bail out early when the profile file is missing.
    if not os.path.isfile(s_file):
        raise Exception("unable to locate the config profile file")
    return ProfileReader(logger)(s_file)
def run(self, b, output_file, **kwargs):
    """Run docmaker's pipeline to create a document.

    :param b: name of the builder support module to import dynamically
    :param output_file: path the generated document is written to
    :param kwargs: forwarded to the builder's creation step
    :raises DocBuilderImptError: when the support module is missing the
        ``impt_class`` attribute, the advertised class, or yields an
        object that is not a ``BuilderGen``
    """
    try:
        self.logger.debug("attempting the import of {0} library".format(b))
        doc_mod = __import__(b)
        if not hasattr(doc_mod, "impt_class"):
            msg = "module {0} has no impt_class attribute".format(b)
            raise DocBuilderImptError(msg)
        cname = getattr(doc_mod, "impt_class")
        if not hasattr(doc_mod, cname):
            msg = "module {0} has no {1} class implemented".format(b, cname)
            raise DocBuilderImptError(msg)
        # FIX: the class must be resolved from doc_mod (the module imported
        # above); the original referenced an undefined name ``hw_mod``.
        self.builder = getattr(doc_mod, cname)(self.logger)
        # isinstance() already accepts subclasses of BuilderGen, so the
        # original's extra issubclass() test was redundant.
        if not isinstance(self.builder, BuilderGen):
            msg = "unknown support library specification in {0}".format(
                self.builder)
            raise DocBuilderImptError(msg)
    except (ImportError, DocBuilderImptError) as e:
        self.logger.fatal("{0} support library failure".format(b))
        raise e
    self.__create(
        self.__open_dbms_conn(),
        # Resource dirs come as a list of {name, value} profile nodes.
        {p["name"]: p["value"] for p in ProfileReader.get_content(
            self.rdirs_info, ProfileReader.PNODE_MANY)},
        output_file,
        **kwargs
    )
def fetch_rdirs(resdir, rdirs_conf):
    """Creates dict with resource directories of full path"""
    # One pass: read the {name, value} nodes and anchor each value at resdir.
    nodes = ProfileReader.get_content(rdirs_conf, ProfileReader.PNODE_MANY)
    return {node["name"]: os.path.join(resdir, node["value"]) for node in nodes}
def __open_dbms_conn(self):
    """Open and return a connection to postgresql as per the profile.

    :raises DocBuilderError: when the dbms refuses the connection or the
        pgsql configuration section is incomplete
    """
    try:
        # FIX: the five format() arguments were not comma-separated in the
        # original, which is a SyntaxError.
        conn_str = "dbname={0} user={1} host={2} password={3} port={4}".format(
            ProfileReader.get_content(self.pgsql_conf.db, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(self.pgsql_conf.user, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(self.pgsql_conf.host, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(self.pgsql_conf.passwd, ProfileReader.PNODE_UNIQUE),
            ProfileReader.get_content(self.pgsql_conf.port, ProfileReader.PNODE_UNIQUE)
        )
        return psycopg2.connect(conn_str)
    except psycopg2.Error as e:
        self.logger.error(e)
        raise DocBuilderError("dbms was not connected")
    except KeyError as e:
        # A missing profile entry surfaces as KeyError from the reader.
        self.logger.error(e)
        raise DocBuilderError("slack pgsql configuration")
def __incept_impt(self):
    """Load the hw module required by the selected controller profile.

    Imports the module named in the profile, resolves the class it
    advertises via its ``impt_class`` attribute, instantiates it with the
    profile's mod_params as keyword arguments, and stores it in
    ``self.model`` before running ``self.verify_model()``.
    """
    m = ProfileReader.get_content(self.ctrl_info.selected.mod_name,
                                  ProfileReader.PNODE_UNIQUE)

    def setup_kwargs(l):
        # Flatten a list of {name, value} profile nodes into a kwargs dict.
        n = {}
        for d in l:
            n[d["name"]] = d["value"]
        return n

    try:
        self.logger.debug("attempting the import of {0} library".format(m))
        hw_mod = __import__(m)
        # The convention: a support module names its entry class in an
        # ``impt_class`` attribute and defines that class at top level.
        if not hasattr(hw_mod, "impt_class"):
            msg = "module {0} has no impt_class attribute".format(m)
            raise CtrlModuleError(msg)
        cname = getattr(hw_mod, "impt_class")
        if not hasattr(hw_mod, cname):
            msg = "module {0} has no {1} class implemented".format(
                m, cname)
            raise CtrlModuleError(msg)
        self.model = getattr(hw_mod, cname)(self.logger, **setup_kwargs(
            ProfileReader.get_content(self.ctrl_info.selected.mod_params,
                                      ProfileReader.PNODE_MANY)))
        self.verify_model()
    except (ImportError, CtrlModuleError) as e:
        self.logger.fatal("{0} support library failure".format(m))
        raise e
def getup_factory(logger, variants):
    """Build a Factory with one event class subscribed per archetype variant."""

    def _load_class(mod_name):
        # Import the module and resolve the class it advertises via its
        # ``impt_class`` attribute; RuntimeError on a malformed module.
        mod = __import__(mod_name)
        if not hasattr(mod, "impt_class"):
            raise RuntimeError(
                "module {0} has no impt_class attribute".format(mod_name))
        klass_name = getattr(mod, "impt_class")
        if not hasattr(mod, klass_name):
            raise RuntimeError(
                "module {0} has no {1} class implemented".format(
                    mod_name, klass_name))
        return getattr(mod, klass_name)

    subscriptions = dict_params(
        ProfileReader.get_content(variants, ProfileReader.PNODE_MANY),
        'archetype', 'event_mod')
    factory = Factory()
    for arch, ev_mod in subscriptions.items():
        try:
            # Archetype keys are numeric literals (base inferred by int(_, 0)).
            factory.subscribe(int(arch, 0), _load_class(ev_mod))
        except (ImportError, RuntimeError) as e:
            logger.fatal("{0} support library failure".format(ev_mod))
            raise e
    return factory
def undonota(logger, pt, req):
    # Handler: cancel a previously issued credit note (nota de credito).
    # Returns an ErrorCode value (int).
    ncr_id = req.get('ncr_id', None)
    usr_id = req.get('usr_id', None)
    reason = req.get('reason', None)
    mode = req.get('mode', None)
    if reason is None:
        reason = ''
    if (ncr_id is None) or (usr_id is None) or (mode is None):
        return ErrorCode.REQUEST_INCOMPLETE.value

    def get_xml_name():
        # Fetch the credit note's xml base name from dbms.
        # NOTE(review): returns None when the query yields no rows — the
        # caller would then os.path.join with None; verify upstream data.
        q = """select ref_id as filename FROM fac_nota_credito WHERE fac_nota_credito.id = {}""".format(ncr_id)
        for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True):
            # Just taking first row of query result
            return row['filename'] + '.xml'

    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    _uuid = None
    _rfc = None
    try:
        _rfc = __get_emisor_rfc(logger, usr_id, pt.dbms.pgsql_conn)
    except:
        return ErrorCode.DBMS_SQL_ISSUES.value
    try:
        # Locate the signed xml and pull its UUID, needed for cancellation.
        cfdi_dir = os.path.join(rdirs['cfdi_output'], _rfc)
        f_xml = os.path.join(cfdi_dir, get_xml_name())
        logger.debug('File to cancel {}'.format(f_xml))
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        _uuid = xml_dat['UUID']
    except:
        return ErrorCode.RESOURCE_NOT_FOUND.value
    # Cancel with the PAC first; only then record the cancellation in dbms.
    rc = __pac_cancel(logger, _uuid, _rfc, pt.tparty.pac)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    q_do = """select * from ncr_exec_cancel( {}::integer, {}::integer, '{}'::text, {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        usr_id,   # _usr_id
        ncr_id,   # _ncr_id
        reason,   # _reason
        mode      # _mode
    )
    rc = __run_sp_ra(logger, q_do, pt.dbms.pgsql_conn)
    return rc.value
def donota(logger, pt, req):
    # Handler: build, PAC-sign, store and render a credit note cfdi.
    # Returns an ErrorCode value (int).

    def store(f_xml):
        # Persist the signed xml's metadata via the ncr_save_xml procedure.
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        q = """select * from ncr_save_xml( {}::integer, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::boolean, {}::integer ) AS result( rc integer, msg text ) """.format(
            # Store procedure parameters
            req.get('ncr_id', None),   # _ncr_id
            os.path.basename(f_xml),   # _file_xml
            xml_dat['CFDI_SERIE'],     # _serie
            xml_dat['CFDI_FOLIO'],     # _folio
            req.get('saldado', None),  # _saldado
            req.get('usr_id', None)    # _usr_id
        )
        logger.debug("Performing query: {}".format(q))
        try:
            res = HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True)
            # Exactly one (rc, msg) row is expected from the procedure.
            if len(res) != 1:
                raise Exception(
                    'unexpected result regarding execution of store')
            rcode, rmsg = res.pop()
            if rcode == 0:
                return ErrorCode.SUCCESS
            raise Exception(rmsg)
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES

    logger.info("stepping in donota handler within {}".format(__name__))
    filename = req.get('filename', None)
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())
    # Stage 1: build the raw xml into a temp file.
    rc = __run_builder(logger, pt, tmp_file, resdir, 'ncrxml',
                       usr_id=req.get('usr_id', None),
                       nc_id=req.get('ncr_id', None))
    if rc != ErrorCode.SUCCESS:
        pass
    else:
        _rfc = None
        try:
            _rfc = __get_emisor_rfc(logger, req.get('usr_id', None),
                                    pt.dbms.pgsql_conn)
        except:
            rc = ErrorCode.DBMS_SQL_ISSUES
        if rc == ErrorCode.SUCCESS:
            # Stage 2: PAC-sign into the rfc's output dir, then store
            # metadata, then render the companion pdf.
            out_dir = os.path.join(rdirs['cfdi_output'], _rfc)
            rc, signed_file = __pac_sign(logger, tmp_file, filename,
                                         out_dir, pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = store(signed_file)
            if rc == ErrorCode.SUCCESS:
                rc = __run_builder(logger, pt,
                                   signed_file.replace('.xml', '.pdf'),
                                   resdir, 'ncrpdf',
                                   xml=signed_file, rfc=_rfc)
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value
def facturar(logger, pt, req):
    # Handler: build, PAC-sign, store and render an invoice (factura) cfdi.
    # Returns an ErrorCode value (int).

    def fetch_empdat(usr_id):
        # Look up the issuing company's rfc and no_id for this user.
        # NOTE(review): when the query yields zero rows this falls off the
        # end and implicitly returns None, which the caller unpacks as a
        # 2-tuple — confirm the query always returns a row.
        sql = """select upper(EMP.rfc) as rfc, EMP.no_id as no_id FROM gral_suc AS SUC LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id LEFT JOIN gral_emp AS EMP ON EMP.id = SUC.empresa_id WHERE USR_SUC.gral_usr_id="""
        q = "{0}{1}".format(sql, usr_id)
        logger.debug("Performing query: {}".format(q))
        try:
            for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q):
                return ErrorCode.SUCCESS, dict(rfc=row['rfc'], no_id=row['no_id'])
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES, None

    def store(f_xmlin, usr_id, prefact_id, no_id):
        # Persist the signed invoice's metadata via fac_save_xml.
        parser = SaxReader()
        xml_dat, _ = parser(f_xmlin)
        ref_id = '{}_{}{}'.format(no_id, xml_dat['CFDI_SERIE'],
                                  xml_dat['CFDI_FOLIO'])
        q = """select fac_save_xml from fac_save_xml( '{}'::character varying, {}::integer, {}::integer, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, {}::double precision, {}::double precision, {}::double precision, {}::boolean )""".format(
            # Store procedure parameters
            os.path.basename(f_xmlin),            # file_xml
            prefact_id,                           # prefact_id
            usr_id,                               # usr_id
            xml_dat['CFDI_DATE'].split('T')[0],   # creation_date
            ref_id,                               # no_id_emp
            xml_dat['CFDI_SERIE'],                # serie
            xml_dat['CFDI_FOLIO'],                # folio
            'THIS FIELD IS DEPRECATED',           # items_str
            'THIS FIELD IS DEPRECATED',           # traslados_str
            'THIS FIELD IS DEPRECATED',           # retenciones_str
            xml_dat['INCEPTOR_REG'],              # reg_fiscal
            'THIS FIELD IS DEPRECATED',           # pay_method
            xml_dat['INCEPTOR_CP'],               # exp_place
            'FACTURA',                            # proposito - It is obviously hardcoded
            'THIS FIELD IS DEPRECATED',           # no_aprob
            'THIS FIELD IS DEPRECATED',           # ano_aprob
            xml_dat['RECEPTOR_RFC'],              # rfc_custm - RFC customer
            xml_dat['RECEPTOR_NAME'],             # rs_custm - Razon social customer
            '0000',                               # account_number - An account fake number invented by me
            xml_dat['TAXES']['TRAS']['TOTAL'],    # total_tras
            '0',                                  # subtotal_with_desc
            xml_dat['CFDI_TOTAL'],                # total
            'false'                               # refact
        )
        logger.debug("Performing query: {}".format(q))
        try:
            s_out = None
            for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True):
                # Just taking first row of query result
                s_out = row['fac_save_xml']
                break
            # here we should parse s_out line
            logger.debug(
                'store procedure fac_save_xml has returned: {}'.format(s_out))
            return ErrorCode.SUCCESS
        except:
            logger.error(dump_exception())
            return ErrorCode.ETL_ISSUES

    logger.info("stepping in factura handler within {}".format(__name__))
    filename = req.get('filename', None)
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())
    # Pipeline: build xml -> fetch company data -> sign -> store -> pdf.
    rc = __run_builder(logger, pt, tmp_file, resdir, 'facxml',
                       usr_id=req.get('usr_id', None),
                       prefact_id=req.get('prefact_id', None))
    if rc == ErrorCode.SUCCESS:
        rc, inceptor_data = fetch_empdat(req.get('usr_id', None))
        if rc == ErrorCode.SUCCESS:
            out_dir = os.path.join(rdirs['cfdi_output'], inceptor_data['rfc'])
            rc, outfile = __pac_sign(logger, tmp_file, filename, out_dir,
                                     pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = store(outfile, req.get('usr_id', None),
                           req.get('prefact_id', None),
                           inceptor_data['no_id'])
                if rc == ErrorCode.SUCCESS:
                    rc = __run_builder(
                        logger, pt,
                        outfile.replace('.xml', '.pdf'),  # We replace the xml extension
                        resdir, 'facpdf', xml=outfile,
                        rfc=inceptor_data['rfc'])
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value
def dopago(logger, pt, req):
    """Handler: build, PAC-sign and render a payment (pago) cfdi.

    :param logger: application logger
    :param pt: parsed config profile tree
    :param req: request dict with filename, usr_id and pag_id keys
    :returns: an ErrorCode value (int)
    """
    logger.info("stepping in dopago handler within {}".format(__name__))
    filename = req.get('filename', None)
    usr_id = req.get('usr_id', None)
    pag_id = req.get('pag_id', None)
    if (pag_id is None) or (usr_id is None) or (filename is None):
        return ErrorCode.REQUEST_INCOMPLETE.value
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())

    def update_filename():
        # Record the final xml name (without extension) on the payment row.
        q = """UPDATE erp_pagos set aux_no_fac = '{}' WHERE erp_pagos.numero_transaccion = {}""".format(
            filename.replace('.xml', ''), pag_id)
        try:
            # FIX: onfly_update takes the dbms connection as its first
            # argument — every sibling handler in this module passes it.
            HelperPg.onfly_update(pt.dbms.pgsql_conn, q)
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES
        return ErrorCode.SUCCESS

    def update_consecutive_alpha(f_xmlin):
        # Bump the folio counter for the 'PAG' purpose of this user's branch.
        parser = SaxReader()
        xml_dat, _ = parser(f_xmlin)
        q = """update fac_cfds_conf_folios set folio_actual = (folio_actual + 1) FROM gral_suc AS SUC LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id WHERE fac_cfds_conf_folios.proposito = 'PAG' AND fac_cfds_conf_folios.fac_cfds_conf_id=fac_cfds_conf.id AND USR_SUC.gral_usr_id = {}""".format(usr_id)
        try:
            # FIX: same missing-connection defect as update_filename above.
            HelperPg.onfly_update(pt.dbms.pgsql_conn, q)
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES
        return ErrorCode.SUCCESS

    rc = __run_builder(logger, pt, tmp_file, resdir, 'pagxml',
                       usr_id=usr_id, pag_id=pag_id)
    if rc != ErrorCode.SUCCESS:
        pass
    else:
        _rfc = None
        try:
            # FIX: __get_emisor_rfc also requires the dbms connection
            # (compare donota/undonota in this module).
            _rfc = __get_emisor_rfc(logger, req.get('usr_id', None),
                                    pt.dbms.pgsql_conn)
        except:
            rc = ErrorCode.DBMS_SQL_ISSUES
        if rc == ErrorCode.SUCCESS:
            out_dir = os.path.join(rdirs['cfdi_output'], _rfc)
            rc, signed_file = __pac_sign(logger, tmp_file, filename,
                                         out_dir, pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = update_filename()
            if rc == ErrorCode.SUCCESS:
                rc = update_consecutive_alpha(signed_file)
            if rc == ErrorCode.SUCCESS:
                rc = __run_builder(logger, pt,
                                   signed_file.replace('.xml', '.pdf'),
                                   resdir, 'pagpdf',
                                   xml=signed_file, rfc=_rfc)
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value
def facturar(logger, pt, req):
    """Handler (prototype pipeline): build, PAC-sign and store an invoice.

    :param logger: application logger
    :param pt: parsed config profile tree
    :param req: request dict with usr_id and prefact_id keys
    :returns: an ErrorCode value (int)
    """

    def dm_exec(filename, resdir, usr_id, prefact_id):
        # Run the docmaker pipeline to build the raw invoice xml.
        dm_builder = 'facxml'
        kwargs = {'usr_id': usr_id, 'prefact_id': prefact_id}
        try:
            # NOTE(review): pgsql_conf is fed with pt.dbms.pgsql_conn —
            # looks like a conf/conn mismatch; confirm DocPipeLine's API.
            dpl = DocPipeLine(logger, resdir,
                              rdirs_conf=pt.res.dirs,
                              pgsql_conf=pt.dbms.pgsql_conn)
            dpl.run(dm_builder, filename, **kwargs)
            return ErrorCode.SUCCESS
        except:
            logger.error(dump_exception())
            return ErrorCode.DOCMAKER_ERROR

    def pac_sign(f, resdir):
        # Stamp the xml through the configured PAC connector.
        try:
            # Here it would be placed, code calling
            # the pac connector mechanism
            logger.debug('Getting a pac connector as per config profile')
            pac, err = setup_pac(logger, pt.tparty.pac)
            if pac is None:
                raise Exception(err)
            logger.debug('File to sign {}'.format(f))
            with open(f) as t:
                signed = pac.stamp(t.read(), 'HARD_XID')
            logger.debug(signed)
            return ErrorCode.SUCCESS, f
        except:
            logger.error(dump_exception())
            # FIX: the caller unpacks a 2-tuple; the original returned a
            # bare ErrorCode here, raising TypeError on every failure.
            return ErrorCode.THIRD_PARTY_ISSUES, None

    def store(f):
        try:
            # Here it would be placed, code for
            # saving relevant info of newer cfdi in dbms
            # FIX: logger.debug uses %-style lazy args — a '{}' placeholder
            # was never substituted; format explicitly instead.
            logger.debug('saving relevant info of {} in dbms'.format(f))
            return ErrorCode.SUCCESS
        except:
            logger.error(dump_exception())
            return ErrorCode.ETL_ISSUES

    logger.info("stepping in factura handler within {}".format(__name__))
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    usr_id = req.get('usr_id', None)
    prefact_id = req.get('prefact_id', None)
    logger.debug('Using as resources directory {}'.format(resdir))
    tmp_dir = tempfile.gettempdir()
    filename = os.path.join(tmp_dir, HelperStr.random_str())
    rc = dm_exec(filename, resdir, usr_id, prefact_id)
    if rc == ErrorCode.SUCCESS:
        rc, outfile = pac_sign(filename, resdir)
        if rc == ErrorCode.SUCCESS:
            rc = store(outfile)
    return rc.value
def undofacturar(logger, pt, req):
    # Handler: cancel a previously issued invoice (factura).
    # Returns an ErrorCode value (int).
    fact_id = req.get('fact_id', None)
    usr_id = req.get('usr_id', None)
    reason = req.get('reason', None)
    mode = req.get('mode', None)
    if reason is None:
        reason = ''
    if (fact_id is None) or (usr_id is None) or (mode is None):
        return ErrorCode.REQUEST_INCOMPLETE.value
    # Validation query: may the invoice be cancelled at all?
    q_val = """select * from fac_val_cancel( {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        fact_id   # _fac_id
    )
    # Execution query: record the cancellation in dbms.
    q_do = """select * from fac_exec_cancel( {}::integer, {}::integer, '{}'::text, {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        usr_id,   # _usr_id
        fact_id,  # _fac_id
        reason,   # _reason
        mode      # _mode
    )

    def run_store(q):
        # NOTE(review): defined but not called in this body — presumably
        # superseded by __run_sp_ra; confirm before removal.
        logger.debug("Performing query: {}".format(q))
        res = HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True)
        # For this case we are just expecting one row
        if len(res) != 1:
            raise Exception('unexpected result regarding execution of store')
        return res

    def check_result(r):
        # NOTE(review): also unused in this body — see run_store above.
        rcode, rmsg = r.pop()
        if rcode != 0:
            raise Exception(rmsg)

    def get_xml_name():
        # Fetch the invoice's xml base name from dbms.
        q = """select ref_id as filename FROM fac_docs WHERE fac_docs.id="""
        for row in HelperPg.onfly_query(pt.dbms.pgsql_conn,
                                        "{0}{1}".format(q, fact_id), True):
            # Just taking first row of query result
            return row['filename'] + '.xml'

    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    _uuid = None
    _res = None
    _rfc = None
    try:
        _rfc = __get_emisor_rfc(logger, usr_id, pt.dbms.pgsql_conn)
    except:
        return ErrorCode.DBMS_SQL_ISSUES.value
    try:
        # Locate the signed xml and pull its UUID, needed for cancellation.
        cfdi_dir = os.path.join(rdirs['cfdi_output'], _rfc)
        f_xml = os.path.join(cfdi_dir, get_xml_name())
        logger.debug('File to cancel {}'.format(f_xml))
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        _uuid = xml_dat['UUID']
    except:
        return ErrorCode.RESOURCE_NOT_FOUND.value
    # Validate -> PAC cancel -> record, aborting on the first failure.
    rc = __run_sp_ra(logger, q_val, pt.dbms.pgsql_conn, tmode=False)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    rc = __pac_cancel(logger, _uuid, _rfc, pt.tparty.pac)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    rc = __run_sp_ra(logger, q_do, pt.dbms.pgsql_conn)
    return rc.value
# NOTE(review): the five definitions below duplicate (and, being later in
# the module, override) the earlier dopago/undofacturar/facturar/donota/
# undonota definitions — this file looks like a concatenation of versions.

def dopago(logger, pt, req):
    # Handler: build, PAC-sign and render a payment (pago) cfdi.
    logger.info("stepping in dopago handler within {}".format(__name__))
    filename = req.get('filename', None)
    usr_id = req.get('usr_id', None)
    pag_id = req.get('pag_id', None)
    if (pag_id is None) or (usr_id is None) or (filename is None):
        return ErrorCode.REQUEST_INCOMPLETE.value
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())

    def update_consecutive_alpha(f_xmlin):
        # Bump the folio counter for the 'PAG' purpose of this user's branch.
        parser = SaxReader()
        xml_dat, _ = parser(f_xmlin)
        q = """update fac_cfds_conf_folios set folio_actual = (folio_actual + 1) FROM gral_suc AS SUC LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id WHERE fac_cfds_conf_folios.proposito = 'PAG' AND fac_cfds_conf_folios.fac_cfds_conf_id=fac_cfds_conf.id AND USR_SUC.gral_usr_id = {}""".format(usr_id)
        try:
            HelperPg.onfly_update(pt.dbms.pgsql_conn, q)
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES
        return ErrorCode.SUCCESS

    rc = __run_builder(logger, pt, tmp_file, resdir, 'pagxml',
                       usr_id=usr_id, pag_id=pag_id)
    if rc != ErrorCode.SUCCESS:
        pass
    else:
        _rfc = None
        try:
            _rfc = __get_emisor_rfc(logger, req.get('usr_id', None),
                                    pt.dbms.pgsql_conn)
        except:
            rc = ErrorCode.DBMS_SQL_ISSUES
        if rc == ErrorCode.SUCCESS:
            out_dir = os.path.join(rdirs['cfdi_output'], _rfc)
            rc, signed_file = __pac_sign(logger, tmp_file, filename,
                                         out_dir, pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = update_consecutive_alpha(signed_file)
            if rc == ErrorCode.SUCCESS:
                rc = __run_builder(logger, pt,
                                   signed_file.replace('.xml', '.pdf'),
                                   resdir, 'pagpdf',
                                   xml=signed_file, rfc=_rfc)
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value


def undofacturar(logger, pt, req):
    # Handler: cancel a previously issued invoice (factura).
    fact_id = req.get('fact_id', None)
    usr_id = req.get('usr_id', None)
    reason = req.get('reason', None)
    mode = req.get('mode', None)
    if reason is None:
        reason = ''
    if (fact_id is None) or (usr_id is None) or (mode is None):
        return ErrorCode.REQUEST_INCOMPLETE.value
    # Validation query: may the invoice be cancelled at all?
    q_val = """select * from fac_val_cancel( {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        fact_id   # _fac_id
    )
    # Execution query: record the cancellation in dbms.
    q_do = """select * from fac_exec_cancel( {}::integer, {}::integer, '{}'::text, {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        usr_id,   # _usr_id
        fact_id,  # _fac_id
        reason,   # _reason
        mode      # _mode
    )

    def run_store(q):
        # NOTE(review): unused here — presumably superseded by __run_sp_ra.
        logger.debug("Performing query: {}".format(q))
        res = HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True)
        # For this case we are just expecting one row
        if len(res) != 1:
            raise Exception('unexpected result regarding execution of store')
        return res

    def check_result(r):
        # NOTE(review): unused here — see run_store above.
        rcode, rmsg = r.pop()
        if rcode != 0:
            raise Exception(rmsg)

    def get_xml_name():
        # Fetch the invoice's xml base name from dbms.
        q = """select ref_id as filename FROM fac_docs WHERE fac_docs.id="""
        for row in HelperPg.onfly_query(pt.dbms.pgsql_conn,
                                        "{0}{1}".format(q, fact_id), True):
            # Just taking first row of query result
            return row['filename'] + '.xml'

    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    _uuid = None
    _res = None
    _rfc = None
    try:
        _rfc = __get_emisor_rfc(logger, usr_id, pt.dbms.pgsql_conn)
    except:
        return ErrorCode.DBMS_SQL_ISSUES.value
    try:
        # Locate the signed xml and pull its UUID, needed for cancellation.
        cfdi_dir = os.path.join(rdirs['cfdi_output'], _rfc)
        f_xml = os.path.join(cfdi_dir, get_xml_name())
        logger.debug('File to cancel {}'.format(f_xml))
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        _uuid = xml_dat['UUID']
    except:
        return ErrorCode.RESOURCE_NOT_FOUND.value
    # Validate -> PAC cancel -> record, aborting on the first failure.
    rc = __run_sp_ra(logger, q_val, pt.dbms.pgsql_conn, tmode=False)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    rc = __pac_cancel(logger, _uuid, _rfc, pt.tparty.pac)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    rc = __run_sp_ra(logger, q_do, pt.dbms.pgsql_conn)
    return rc.value


def facturar(logger, pt, req):
    # Handler: build, PAC-sign, store and render an invoice (factura) cfdi.
    # This variant also persists the SAT-assigned UUID (24th parameter).

    def fetch_empdat(usr_id):
        # Look up the issuing company's rfc and no_id for this user.
        # NOTE(review): zero query rows fall through to an implicit None
        # return, which the caller unpacks as a 2-tuple — verify.
        sql = """select upper(EMP.rfc) as rfc, EMP.no_id as no_id FROM gral_suc AS SUC LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id LEFT JOIN gral_emp AS EMP ON EMP.id = SUC.empresa_id WHERE USR_SUC.gral_usr_id="""
        q = "{0}{1}".format(sql, usr_id)
        logger.debug("Performing query: {}".format(q))
        try:
            for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q):
                return ErrorCode.SUCCESS, dict(rfc=row['rfc'], no_id=row['no_id'])
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES, None

    def store(f_xmlin, usr_id, prefact_id, no_id):
        # Persist the signed invoice's metadata via fac_save_xml.
        parser = SaxReader()
        xml_dat, _ = parser(f_xmlin)
        ref_id = '{}_{}{}'.format(no_id, xml_dat['CFDI_SERIE'],
                                  xml_dat['CFDI_FOLIO'])
        q = """select fac_save_xml from fac_save_xml( '{}'::character varying, {}::integer, {}::integer, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::character varying, {}::double precision, {}::double precision, {}::double precision, {}::boolean, '{}'::character varying )""".format(
            # Store procedure parameters
            os.path.basename(f_xmlin),            # file_xml
            prefact_id,                           # prefact_id
            usr_id,                               # usr_id
            xml_dat['CFDI_DATE'].split('T')[0],   # creation_date
            ref_id,                               # no_id_emp
            xml_dat['CFDI_SERIE'],                # serie
            xml_dat['CFDI_FOLIO'],                # folio
            'THIS FIELD IS DEPRECATED',           # items_str
            'THIS FIELD IS DEPRECATED',           # traslados_str
            'THIS FIELD IS DEPRECATED',           # retenciones_str
            xml_dat['INCEPTOR_REG'],              # reg_fiscal
            'THIS FIELD IS DEPRECATED',           # pay_method
            xml_dat['INCEPTOR_CP'],               # exp_place
            'FACTURA',                            # proposito - It is obviously hardcoded
            'THIS FIELD IS DEPRECATED',           # no_aprob
            'THIS FIELD IS DEPRECATED',           # ano_aprob
            xml_dat['RECEPTOR_RFC'],              # rfc_custm - RFC customer
            xml_dat['RECEPTOR_NAME'],             # rs_custm - Razon social customer
            '0000',                               # account_number - An account fake number invented by me
            xml_dat['TAXES']['TRAS']['TOTAL'],    # total_tras
            '0',                                  # subtotal_with_desc
            xml_dat['CFDI_TOTAL'],                # total
            'false',                              # refact
            xml_dat['UUID']                       # id de documento - It came from SAT timbrado throughout PAC
        )
        logger.debug("Performing query: {}".format(q))
        try:
            s_out = None
            for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True):
                # Just taking first row of query result
                s_out = row['fac_save_xml']
                break
            # here we should parse s_out line
            logger.debug(
                'store procedure fac_save_xml has returned: {}'.format(s_out))
            return ErrorCode.SUCCESS
        except:
            logger.error(dump_exception())
            return ErrorCode.ETL_ISSUES

    logger.info("stepping in factura handler within {}".format(__name__))
    filename = req.get('filename', None)
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())
    # Pipeline: build xml -> fetch company data -> sign -> store -> pdf.
    rc = __run_builder(logger, pt, tmp_file, resdir, 'facxml',
                       usr_id=req.get('usr_id', None),
                       prefact_id=req.get('prefact_id', None))
    if rc == ErrorCode.SUCCESS:
        rc, inceptor_data = fetch_empdat(req.get('usr_id', None))
        if rc == ErrorCode.SUCCESS:
            out_dir = os.path.join(rdirs['cfdi_output'], inceptor_data['rfc'])
            rc, outfile = __pac_sign(logger, tmp_file, filename, out_dir,
                                     pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = store(outfile, req.get('usr_id', None),
                           req.get('prefact_id', None),
                           inceptor_data['no_id'])
                if rc == ErrorCode.SUCCESS:
                    rc = __run_builder(
                        logger, pt,
                        outfile.replace('.xml', '.pdf'),  # We replace the xml extension
                        resdir, 'facpdf', xml=outfile,
                        rfc=inceptor_data['rfc'])
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value


def donota(logger, pt, req):
    # Handler: build, PAC-sign, store and render a credit note cfdi.

    def store(f_xml):
        # Persist the signed xml's metadata via the ncr_save_xml procedure.
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        q = """select * from ncr_save_xml( {}::integer, '{}'::character varying, '{}'::character varying, '{}'::character varying, '{}'::boolean, {}::integer ) AS result( rc integer, msg text ) """.format(
            # Store procedure parameters
            req.get('ncr_id', None),   # _ncr_id
            os.path.basename(f_xml),   # _file_xml
            xml_dat['CFDI_SERIE'],     # _serie
            xml_dat['CFDI_FOLIO'],     # _folio
            req.get('saldado', None),  # _saldado
            req.get('usr_id', None)    # _usr_id
        )
        logger.debug("Performing query: {}".format(q))
        try:
            res = HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True)
            # Exactly one (rc, msg) row is expected from the procedure.
            if len(res) != 1:
                raise Exception('unexpected result regarding execution of store')
            rcode, rmsg = res.pop()
            if rcode == 0:
                return ErrorCode.SUCCESS
            raise Exception(rmsg)
        except:
            logger.error(dump_exception())
            return ErrorCode.DBMS_SQL_ISSUES

    logger.info("stepping in donota handler within {}".format(__name__))
    filename = req.get('filename', None)
    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    tmp_dir = tempfile.gettempdir()
    tmp_file = os.path.join(tmp_dir, HelperStr.random_str())
    rc = __run_builder(logger, pt, tmp_file, resdir, 'ncrxml',
                       usr_id=req.get('usr_id', None),
                       nc_id=req.get('ncr_id', None))
    if rc != ErrorCode.SUCCESS:
        pass
    else:
        _rfc = None
        try:
            _rfc = __get_emisor_rfc(logger, req.get('usr_id', None),
                                    pt.dbms.pgsql_conn)
        except:
            rc = ErrorCode.DBMS_SQL_ISSUES
        if rc == ErrorCode.SUCCESS:
            out_dir = os.path.join(rdirs['cfdi_output'], _rfc)
            rc, signed_file = __pac_sign(logger, tmp_file, filename,
                                         out_dir, pt.tparty.pac)
            if rc == ErrorCode.SUCCESS:
                rc = store(signed_file)
            if rc == ErrorCode.SUCCESS:
                rc = __run_builder(logger, pt,
                                   signed_file.replace('.xml', '.pdf'),
                                   resdir, 'ncrpdf',
                                   xml=signed_file, rfc=_rfc)
    # Always clean the temp artifact up.
    if os.path.isfile(tmp_file):
        os.remove(tmp_file)
    return rc.value


def undonota(logger, pt, req):
    # Handler: cancel a previously issued credit note (nota de credito).
    ncr_id = req.get('ncr_id', None)
    usr_id = req.get('usr_id', None)
    reason = req.get('reason', None)
    mode = req.get('mode', None)
    if reason is None:
        reason = ''
    if (ncr_id is None) or (usr_id is None) or (mode is None):
        return ErrorCode.REQUEST_INCOMPLETE.value

    def get_xml_name():
        # Fetch the credit note's xml base name from dbms.
        q = """select ref_id as filename FROM fac_nota_credito WHERE fac_nota_credito.id = {}""".format(ncr_id)
        for row in HelperPg.onfly_query(pt.dbms.pgsql_conn, q, True):
            # Just taking first row of query result
            return row['filename'] + '.xml'

    source = ProfileReader.get_content(pt.source, ProfileReader.PNODE_UNIQUE)
    resdir = os.path.abspath(os.path.join(os.path.dirname(source), os.pardir))
    rdirs = fetch_rdirs(resdir, pt.res.dirs)
    _uuid = None
    _rfc = None
    try:
        _rfc = __get_emisor_rfc(logger, usr_id, pt.dbms.pgsql_conn)
    except:
        return ErrorCode.DBMS_SQL_ISSUES.value
    try:
        # Locate the signed xml and pull its UUID, needed for cancellation.
        cfdi_dir = os.path.join(rdirs['cfdi_output'], _rfc)
        f_xml = os.path.join(cfdi_dir, get_xml_name())
        logger.debug('File to cancel {}'.format(f_xml))
        parser = SaxReader()
        xml_dat, _ = parser(f_xml)
        _uuid = xml_dat['UUID']
    except:
        return ErrorCode.RESOURCE_NOT_FOUND.value
    # Cancel with the PAC first; only then record the cancellation in dbms.
    rc = __pac_cancel(logger, _uuid, _rfc, pt.tparty.pac)
    if rc != ErrorCode.SUCCESS:
        return rc.value
    q_do = """select * from ncr_exec_cancel( {}::integer, {}::integer, '{}'::text, {}::integer ) AS result( rc integer, msg text ) """.format(
        # Store procedure parameters
        usr_id,   # _usr_id
        ncr_id,   # _ncr_id
        reason,   # _reason
        mode      # _mode
    )
    rc = __run_sp_ra(logger, q_do, pt.dbms.pgsql_conn)
    return rc.value