def main():
    """Validate every draft account move of 2017, retrying failed batches.

    Asks for interactive confirmation before starting; raises
    KeyboardInterrupt when the user answers 'n'.
    """
    state = 'draft'
    from_date = '2017-01-01'
    to_date = '2017-12-31'
    step("Getting drafts moves")
    draft_move_ids = get_draft_moves_ids(state, from_date, to_date)
    step("There are {} account moves to validate".format(len(draft_move_ids)))
    if draft_move_ids:
        step("Do you want to continue? (Y/n)")
        answer = raw_input()
        while answer.lower() not in ['y', 'n', '']:
            # BUG FIX: re-show the prompt BEFORE reading the next answer;
            # the original read input first and printed the prompt after it.
            step("Do you want to continue? (Y/n)")
            answer = raw_input()
        if answer in ['n', 'N']:
            raise KeyboardInterrupt
        # Split ids into batches of MAX_ELEMS for validation.
        draft_move_ids_gen = (
            tuple(draft_move_ids[i:i + MAX_ELEMS])
            for i in range(0, len(draft_move_ids), MAX_ELEMS)
        )
        res = validate_moves(draft_move_ids_gen)
        failed = {
            move_ids: result
            for move_ids, result in res.iteritems()
            if result is False
        }
        # Keep retrying until every batch validates.
        while failed:
            warn("There were failed validation, tring again")
            res = validate_moves(failed)
            failed = {
                elem: result
                for elem, result in res.iteritems()
                if result is False
            }
    success("Done!")
def get_nif_from_csv(config):
    """Fetch and normalise the NIF of every 'soci' listed in the
    configured spreadsheet, together with their cancellation dates.

    Returns (validated_nifs, [(nif, end_date), ...]).
    """
    fetcher = SheetFetcher(
        documentName=config.filename,
        credentialFilename='CredencialsBaixaSocis.json',
    )

    def column(interval):
        # One spreadsheet column as utf-8 encoded byte strings.
        return [row[0].encode('utf-8')
                for row in fetcher.get_range(config.sheet, interval)]

    load_dateI = column(config.intervalCells4DateI)
    load_dateF = column(config.intervalCells4DateF)
    load_nif = column(config.intervalCells4DNI)
    # Strip any non-alphanumeric character and upper-case the NIF.
    load_nif = [re.sub('[^a-zA-Z0-9]+', '', nif).upper() for nif in load_nif]
    load_Check = column(config.intervalCells4Check)
    load_Check = [re.sub('[^a-zA-Z]+', '', i).upper() for i in load_Check]
    load_name = column(config.intervalCells4Name)
    name = zip(load_nif, load_name, load_Check)
    load_data = zip(load_nif, load_dateF)
    load_real_nif, wrong_nif = get_real_nif(
        load_nif, name, 'DNI_a_verificar_migracion.csv')
    warn("There are {} DNI that are duplicated and {} that are misinformed, from the initial {} in {}",
         len(load_nif) - len(set(load_nif)), len(wrong_nif),
         len(load_nif), config.filename)
    return load_real_nif, load_data
def execute_sql(dbcur, sql_query):
    """Execute *sql_query* on cursor *dbcur*.

    On failure the query is logged and the original exception is
    re-raised for the caller to handle.
    """
    try:
        dbcur.execute(sql_query)
    except Exception:  # broad on purpose: we only log and re-raise
        warn('Failed executing query')
        warn(sql_query)
        # BUG FIX: a bare `raise` preserves the original traceback
        # (Python 2 `raise ex` discarded it); this also replaces the
        # Py2-only `except Exception, ex` syntax with the portable form.
        raise
def get_data_from_erp(queryfile, filename):
    ''' Find the 'socis' from ERP that need a cancelation date.

    queryfile -- path of the SQL file to run against the ERP database
    filename  -- tab-separated output file for (vat, ref, categoria, name)

    Returns (set_of_nifs, list_of_row_dicts).
    '''
    with io.open(queryfile) as f:
        query = f.read()
    step("Connecting to the ERP database...")
    db = psycopg2.connect(**configdb.psycopg)
    # NOTE(review): the connection is never closed or committed here —
    # presumably fine for a read-only one-shot script; confirm.
    with db.cursor() as cursor:
        try:
            cursor.execute(query)
        except KeyError as e:
            # A KeyError means the query had an unbound %(var)s placeholder.
            fail("Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        erp_data = nsList(cursor)
    erp_data = [dict(data) for data in erp_data]
    # Count NIFs appearing more than once, to warn about duplicates.
    erp_count_duplicated = collections.Counter(i['nif'] for i in erp_data)
    erp_duplicated = collections.Counter({elem: count
        for elem, count in erp_count_duplicated.iteritems() if count > 1
    })
    warn("There are {} duplicated records:{}", len(erp_duplicated), erp_duplicated)
    erp_nif = set(i['nif'] for i in erp_data)
    erp_ref = set(i['ref_cliente'] for i in erp_data)
    erp_vat = [i['vat'] for i in erp_data]
    erp_categoria = [i['categoria'] for i in erp_data]
    erp_name = [i['name'] for i in erp_data]
    step("Saving ERP data in {}, there are {} cases that meet the migration criteria"
        , filename, len(erp_nif))
    with open(filename, "w") as loadsocis:
        writer = csv.writer(loadsocis, delimiter = "\t")
        writer.writerows(
            zip(erp_vat, erp_ref, erp_categoria, erp_name))
    return erp_nif, erp_data
def duplicateds_by_cups(duplicateds, mongo_db, mongo_collection, doit=False):
    """Scan aggregated duplicate groups and delete redundant curve records.

    duplicateds      -- iterable of aggregation entries carrying '_id'
                        (the group key, expected to hold 'name' and
                        'datetime') and 'uniqueIds' (duplicated doc _ids)
    mongo_db         -- mongo database handle
    mongo_collection -- collection name inside mongo_db
    doit             -- when False, only count what WOULD be deleted

    Returns the number of documents deleted (or that would be deleted).
    """
    total_deleted = 0
    count_different_values = 0  # groups whose duplicates disagree on 'ai'
    entry_example = False       # sample group key for the final warning
    for entry in duplicateds:
        try:
            if not 'name' in entry['_id']:
                continue
            # Duplicates on the winter DST change hour are legitimate.
            to_delete = not is_winter_hour_change(entry['_id']['datetime'])
            if to_delete:
                cr = list(mongo_db[mongo_collection].find(
                    {'_id': {'$in': entry['uniqueIds']}}))
                informed_ai = [x['ai'] for x in cr if 'ai' in x]
                if len(informed_ai) != len(cr):
                    # At least one duplicate lacks 'ai': leave group alone.
                    warn("Element/s sense ai", cr)
                    continue
                if len(set(informed_ai)) == 1:
                    # All duplicates agree: keep the first, drop the rest.
                    total_deleted += len(cr) - 1
                    if doit:
                        del_res = mongo_db[mongo_collection].delete_many(
                            {'_id': {'$in': entry['uniqueIds'][1:]}})
                else:
                    count_different_values += 1
                    entry_example = entry['_id']
                    #warn("Repetits amb diferents valors ai ", cr)
        except KeyboardInterrupt as e:
            break
        except Exception as e:
            error("Error: {}".format(e))
    if count_different_values:
        warn("Trobats {} repetits amb diferents valors, per exemple CUPS {}, diahora {}".format(
            count_different_values, entry_example['name'], entry_example['datetime'])
        )
    return total_deleted
def send_contract_erp_email(pol_id):
    """Send the contract email template for policy *pol_id*.

    Does nothing but warn when the global --doit flag is off.
    """
    step("Enviant correus a la polissa id {}", pol_id)
    if not doit:
        warn("email sending disabled, set --doit.")
        return
    send_erp_email(pol_id, 'giscedata.polissa', EMAIL_TEMPLATE_ID,
                   EMAIL_FROM_ACCOUNT)
def change_to_tg(pol_ids):
    """Mark each policy as 'no estimable', prepending a timestamped note
    to its observation fields.

    Returns an ns mapping pol_id -> dict of written changes. Writes only
    happen when the global `doit` flag is set.
    """
    success('')
    success('Modificant polisses:')
    res = ns()
    totals = len(pol_ids)
    fields = [
        "name",
        "no_estimable",
        "observacions",
        "observacions_estimacio",
    ]
    for counter, pol_id in enumerate(pol_ids):
        polissa = ns(pol_obj.read(pol_id, fields))
        step("{}/{} polissa {}".format(counter + 1, totals, polissa.name))
        header = "[{}] ".format(str(datetime.today())[:19])
        if polissa.observacions:
            polissa.observacions = polissa.observacions.encode("utf-8")
        note = header + missatge
        changes = {
            "observacions": note + "\n\n" + (polissa.observacions or ""),
            "observacions_estimacio": note,
            "no_estimable": True,
        }
        res[pol_id] = changes
        if doit:
            pol_obj.write(pol_id, changes)
            warn("modificat")
    return res
def queryOne(aQuery, **vars):
    """Run *aQuery* and return its single row.

    Warns when the row count is anything other than exactly one; the
    first row is still returned (and an empty result raises IndexError).
    """
    many = query(aQuery, **vars)
    # BUG FIX: the original used `len(many) is not 1`, an identity test
    # that only works thanks to CPython's small-int caching; use !=.
    if len(many) != 1:
        warn("S'esperava un sol registre i s'han trobat {}\n"
             "A la query:\n{}\nAmb:\n{}"
             .format(len(many), aQuery, ns(vars).dump()))
    return many[0]
def get_ref_from_csv(config):
    """Read socis' member codes (plus start/end dates and check column)
    from the configured sheet, normalising codes to the 'S######' form.

    Returns whatever get_date() yields for the assembled rows.
    """
    fetcher = SheetFetcher(
        documentName=config.filename,
        credentialFilename='CredencialsBaixaSocis.json',
    )
    sheet = config.sheet
    load_dateI = [row[0].encode('utf-8')
                  for row in fetcher.get_range(sheet, config.intervalCells4DateI)]
    load_dateF = [row[0].encode('utf-8')
                  for row in fetcher.get_range(sheet, config.intervalCells4DateF)]
    load_refsoci = [row[0].encode('utf-8')
                    for row in fetcher.get_range(sheet, config.intervalCells4DNI)]
    load_Check = [unidecode(row[0]).encode('ascii')
                  for row in fetcher.get_range(sheet, config.intervalCells4Check)]
    load_Check = [re.sub('[^a-zA-Z]+', '', i).upper() for i in load_Check]
    # Keep digits only, zero-pad to six positions, prefix with 'S'.
    load_refsoci = [re.sub('[^0-9]+', '', ref) for ref in load_refsoci]
    load_refsoci = ['{num:06d}'.format(num=int(i)) for i in load_refsoci if i]
    load_refsoci = ['{}{}'.format('S', soci) for soci in load_refsoci]
    baixa_erp = zip(load_dateI, load_dateF, load_refsoci, load_Check)
    check_refsoci = len(load_refsoci)
    load_refsoci = set(load_refsoci)
    refsoci, refdata = get_date(baixa_erp)
    warn("There are {} soci number that are duplicated, from the initial {} in {}",
         check_refsoci - len(load_refsoci), check_refsoci, config.filename)
    return refsoci, refdata
def migrate_socis(config, query, output):
    '''
    Migrates cancelation date from both excels into ERP.
    In case there are socis missing from both excels, save their data
    in a file for hand correction.

    config -- ns with newDrive/oldDrive spreadsheet settings
    query  -- SQL file used by get_data_from_erp
    output -- file where the ERP extraction is saved
    '''
    step("Get DNI from {} drive", config.newDrive.filename)
    load_DNI, load_data = get_nif_from_csv(config.newDrive)
    load_erp_nif, load_erp_data = get_data_from_erp(query, output)
    # First pass: socis matched by NIF against the new drive sheet.
    socis2migrate = list(set.intersection(load_erp_nif, load_DNI))
    step("There are {} to migrate from {}", len(socis2migrate),
        config.newDrive.filename)
    soci, data_baixa = validate_dates(load_data, socis2migrate,
        "corregir_fechas_Baixa_socis.csv")
    update_ERP(configdb, soci, data_baixa, "vat", config.newDrive.filename)
    # Second pass: the rest are matched by member reference against the
    # old drive sheet.
    othersocis = load_erp_nif.difference(load_DNI)
    othersocis_ref = [i['ref_cliente'] for i in load_erp_data
        if 'nif' in i if i['nif'] in othersocis]
    step("Get DNI from {}, that are in ERP but not in {}",
        config.oldDrive.filename, config.newDrive.filename)
    load_ref, data_ref = get_ref_from_csv(config.oldDrive)
    socis2migrate_ref = set.intersection(set(load_ref), set(othersocis_ref))
    soci_ref, data_baixa_ref = validate_dates(data_ref, socis2migrate_ref,
        "corregir_fechas_Baixes_de_soci.csv")
    step("There are {} to migrate from {}", len(socis2migrate_ref),
        config.oldDrive.filename)
    update_ERP(configdb, soci_ref, data_baixa_ref, "ref",
        config.oldDrive.filename)
    # Whoever remains was found in neither sheet: needs manual handling.
    missing_socis = set(othersocis_ref).difference(socis2migrate_ref)
    warn("There are {} 'socis' missing from both excels and need to be updated.",
        len(missing_socis))
    step("Saving missing 'socis' into {}", "hand_migration.csv")
    with open("hand_migration.csv", "w") as loadsocis:
        writer = csv.writer(loadsocis, delimiter = "\t")
        writer.writerows(missing_socis)
def generate_and_save_pdf(id, name):
    """Render invoice *id* to a PDF and write it to file *name*.

    Any failure is downgraded to a warning instead of propagating.
    """
    try:
        content = generate_inv_pdf(id)
        step("saving pdf to {}", name)
        save_file(name, content)
        success("done!")
    except Exception as e:
        warn(str(e))
def main():
    """Run a SQL file against the configured database and print the
    result set as a CSV table.

    Usage: script <sqlfile> [-C <dbconfig.py>] [<yamlfile>]
           [--<var> <value> ...]
    """
    options = ns()   # single-dash options: -X value
    optarg = None
    cliargs = ns()   # double-dash query variables: --key value
    keyarg = None
    args = []        # positional arguments
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE(review): values starting with "(" are eval'ed
            # (presumably to allow tuple literals); eval on CLI input is
            # dangerous — confirm callers are trusted.
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    if not args:
        fail(
            "Argument required. Usage:\n"
            "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]"
            .format(sys.argv[0]))
    step("Loading {}...".format(args[0]))
    with open(args[0]) as sqlfile:
        query = sqlfile.read()
    variables = ns()
    if len(args) >= 2:
        # NOTE(review): this format string has no placeholder, so args[1]
        # is never shown in the message — looks like it should be
        # "Loading {}...".
        step("Loading variables...".format(args[1]))
        variables = ns.load(args[1])
        warn(variables.dump())
    # Command-line --vars override the YAML-provided ones.
    variables.update(cliargs)
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query, variables)
        except KeyError as e:
            fail(
                "Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        print dbutils.csvTable(cursor)
def main():
    """Update general conditions on all contracts and report failures."""
    res = update_general_conditions()
    failed_contracts = [contract for contract in res if not contract[0]]
    if failed_contracts:
        # BUG FIX: list only the FAILED contracts — the original built
        # the list from `res` (every contract), contradicting the
        # warning message; also fixed the garbled message text.
        contract_list = [contract[1] for contract in failed_contracts]
        msg = "Achtung!! The following contracts failed:\n - {}"
        warn(msg.format(", ".join(contract_list)))
    success("Updated {} contracts".format(str(len(res))))
def exist_draft_invoices_polissa(polissa):
    """Return ids of pre-existing draft invoices for *polissa*.

    When any are found, the contract is recorded in
    result.contractsWithPreviousDraftInvoices and a warning is emitted.
    """
    draft_ids = get_draft_invoices_from_polissa(polissa)
    if draft_ids:
        warn("El contracte {id} ja tenia {n} factures en esborrany",
             n=len(draft_ids), **polissa)
        result.contractsWithPreviousDraftInvoices.append(polissa.id)
    return draft_ids
def main():
    """Run a SQL file against the configured database and print the
    result set as a CSV table.

    Usage: script <sqlfile> [-C <dbconfig.py>] [<yamlfile>]
           [--<var> <value> ...]
    """
    options = ns()   # single-dash options: -X value
    optarg = None
    cliargs = ns()   # double-dash query variables: --key value
    keyarg = None
    args = []        # positional arguments
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE(review): values starting with "(" are eval'ed
            # on raw CLI input — confirm callers are trusted.
            cliargs[keyarg]=eval(arg) if arg.startswith("(") else arg
            keyarg=None
            continue
        if optarg:
            options[optarg]=arg
            optarg=None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    if not args:
        fail("Argument required. Usage:\n"
            "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]".format(sys.argv[0]))
    step("Loading {}...".format(args[0]))
    with open(args[0]) as sqlfile:
        query = sqlfile.read()
    variables = ns()
    if len(args)>=2:
        # NOTE(review): no placeholder in this format string, args[1] is
        # never shown — presumably meant "Loading {}...".
        step("Loading variables...".format(args[1]))
        variables = ns.load(args[1])
        warn(variables.dump())
    # Command-line --vars override the YAML-provided ones.
    variables.update(cliargs)
    if 'C' in options:
        import imp
        config=imp.load_source('config',options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor :
        try:
            cursor.execute(query, variables)
        except KeyError as e:
            fail("Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        print dbutils.csvTable(cursor)
def search_invoice_by_id(invoice_id):
    """Look an invoice up by database id.

    Returns the id when exactly one invoice matches, None otherwise.
    """
    step("Cerquem la factura... {}", invoice_id)
    matches = fact_obj.search([('id', '=', invoice_id)])
    if not matches:
        warn("Cap factura trobada amb aquest id!!!!")
        return None
    if len(matches) > 1:
        warn("Multiples factures trobades!! {}", matches)
        return None
    step("Factura amb ID {} existeix", matches[0])
    return matches[0]
def search_invoice_by_name(invoice_number):
    """Look an invoice up by its invoice number.

    Returns the id when exactly one invoice matches, None otherwise.
    """
    step("Cerquem la factura...{}", invoice_number)
    matches = fact_obj.search([('number', '=', invoice_number)])
    if not matches:
        warn("Cap factura trobada amb aquest numero!!")
        return None
    if len(matches) > 1:
        warn("Multiples factures trobades!! {}", matches)
        return None
    step("Factura amb ID {} trobada", matches[0])
    return matches[0]
def search_invoice_by_ids(invoice_ids):
    """Resolve a comma-separated string of invoice ids to existing ids.

    Ids with no match are skipped; when several rows match, the first is
    kept after a warning. Returns the list of resolved ids.
    """
    ret_ids = []
    for invoice_id in [int(i) for i in invoice_ids.split(',')]:
        step("Cerquem la factura...", invoice_id)
        fact_ids = fact_obj.search([('id', '=', invoice_id)])
        if len(fact_ids) == 0:
            warn("Cap factura trobada amb aquest id!!")
            # BUG FIX: skip missing ids — the original fell through to
            # fact_ids[0] and raised IndexError on an empty result.
            continue
        if len(fact_ids) > 1:
            warn("Multiples factures trobades!! {}", fact_ids)
        ret_ids.append(fact_ids[0])
        step("Factura amb ID {} existeix", fact_ids[0])
    return ret_ids
def get_polissa_ids_from_csv(filename):
    """Resolve the contract names listed in *filename* to policy ids.

    Ambiguous or unknown contract names are warned about and skipped.
    """
    pol_ids = []
    for row in read_data_from_csv(filename):
        pol_obj = O.GiscedataPolissa
        matches = pol_obj.search([('name', '=', row.contracte)])
        if len(matches) > 1:
            warn("Multiples resultats per polissa {} : {}",
                 row.contracte, matches)
        elif not matches:
            warn("Sense resultats per polissa {}", row.contracte)
        else:
            pol_ids.extend(matches)
    return pol_ids
def main(doit=False):
    """Deduplicate the CCH mongo collections for every known CUPS.

    doit -- actually delete; when False, run in report-only mode.
    """
    mongo_client = pymongo.MongoClient(**configdb.mongodb)
    mongo_db = mongo_client.somenergia
    erpclient = Client(**configdb.erppeek)
    cups_names = get_cups_names(erpclient)
    step("Trobats {} CUPS".format(len(cups_names)))
    for collection in ('tg_cchfact', 'tg_cchval'):
        step("Tractant la coŀlecció {}".format(collection))
        treat_duplicateds(mongo_db, collection, cups_names, doit)
    if not doit:
        warn("S'ha executat sense el doit: mode consulta")
def search_polissa_by_names(polissa_names):
    """Translate policy names into ids (inactive policies included).

    Unknown or ambiguous names are warned about and skipped.
    """
    found = []
    for name in polissa_names:
        step("Cerquem la polissa...", name)
        matches = pol_o.search([('name', '=', name)],
                               context={'active_test': False})
        if not matches:
            warn("Cap polissa trobada amb aquest id!!")
        elif len(matches) > 1:
            warn("Multiples polisses trobades!! {}", matches)
        else:
            found.append(matches[0])
            step("Polissa amb ID {} existeix", matches[0])
    return found
def search_known_errors(res, fact_id):
    """Match the raw error text *res* against the known_errors catalogue.

    Returns the list of recognised error descriptions; prints the raw
    trace when nothing matches.
    """
    errors = [known_errors[key] for key in known_errors.keys() if key in res]
    if errors:
        success("S'ha trobat {} possible(s) error(s):", len(errors))
        for err in errors:
            success("POSSIBLE ERROR >> {}", err)
    else:
        warn("Error no reconegut, fes captura i obre incidència!!")
        step("Traça interna de l'error:")
        step(res)
    return errors
def tmpchanges(context):
    """Control uncleaned temporary files.

    Tracks /tmp contents across calls (state is kept on the function
    object) and warns about files added or removed since the last call,
    unless /tmp is back to its initial state.
    """
    current = set(Path('/tmp').glob('*'))
    if not hasattr(tmpchanges, 'initial'):
        tmpchanges.initial = current
    previous = getattr(tmpchanges, 'previous', set())
    added = current - previous
    removed = previous - current
    tmpchanges.previous = current
    if not added and not removed:
        return
    if tmpchanges.initial == current:
        return
    report = (
        ["+ {}".format(tmp) for tmp in added] +
        ["- {}".format(tmp) for tmp in removed] +
        [" {}".format(tmp) for tmp in current - tmpchanges.initial - added]
    )
    warn("{}: Temporary files left behind:\n{}", context, '\n'.join(report))
def sendmail2all(user, attachment, email):
    """Email the generated csv to the user's recipients plus the extra
    comma-separated addresses in *email*."""
    warn('User info: {}'.format(user))
    recipients = user['recipients'] + email.split(',')
    sendMail(
        sender=user['sender'],
        to=recipients,
        bcc=user['bcc'],
        subject="[Analisi Indexada] Disponibilitat Corbes ",
        md="Hola, Us fem arribar el csv que heu demanat :)",
        attachments=[attachment],
        config='configdb.py',
    )
def get_contract_info(O, contract_number):
    """Return the contract record matching *contract_number* as an ns.

    Raises when the contract is missing or ambiguous; the failure is
    logged with warn() before being re-raised.
    """
    try:
        contract_id = O.GiscedataPolissa.search(
            [('name', 'ilike', contract_number)])
        if not contract_id:
            raise Exception("Contract {} not found".format(contract_number))
        assert len(contract_id) <= 1, \
            "More than one contract, I don't now what to do :("
        contract_info = ns(O.GiscedataPolissa.read(contract_id[0]))
    # BUG FIX: the original caught only IndexError, which nothing in the
    # try block raises, so the warning never fired; catch Exception so
    # the not-found / assertion failures are logged, and re-raise with a
    # bare `raise` to keep the traceback.
    except Exception as e:
        msg = "There where some problem getting contract information of " \
              "contract {}, reason: {}"
        warn(msg.format(contract_number, str(e)))
        raise
    else:
        return contract_info
def janitor_execution(config):
    '''
    For each janitor defined in the yaml file checks for incoherent
    data and send it by email.
    '''
    for name, janitor in config.items():
        if not janitor.get('active', False):
            warn("Skipping janitor: {name}", name=name)
            continue
        step("Running janitor: {description}", **janitor)
        if not janitor.get('query', True):
            # Non-query janitors just run an external python command.
            os.system(janitor.python)
            continue
        erp_data = get_data_from_erp(janitor.sql)
        allData, filename = process_records(erp_data, janitor.output)
        if allData:
            sendmail2all(janitor, filename)
def test_pool_bimensual(periode_ids, metter_id, polissa_id, res):
    """Check that a meter has enough recent distributor readings and that
    they are not bimonthly spaced.

    Returns False (and records polissa_id in *res*) when any period has
    fewer than `lectures_pool_minimes` distributor readings, or when the
    gaps between the six most recent readings exceed `llindar_bimensual`
    days; True otherwise.
    """
    for periode_id in periode_ids:
        # Real readings reported by the distributor.
        last_distri_pool_A_ids = pool_obj.search(
            [
                ('comptador', '=', metter_id),
                ('periode', '=', periode_id),
                ('tipus', '=', 'A'),
                ('origen_id', 'in', (1, 2, 3, 4, 5, 6, 12))  # real
            ], limit=10, order="name DESC")
        # Distributor-side estimations.
        last_distri_pool_B_ids = pool_obj.search(
            [
                ('comptador', '=', metter_id),
                ('periode', '=', periode_id),
                ('tipus', '=', 'A'),
                ('origen_id', '=', 7),  # estimated
                ('origen_comer_id', 'in', (2, 7))  # from the distributor
            ], limit=10, order="name DESC")
        last_distri_pool_ids = last_distri_pool_A_ids + last_distri_pool_B_ids
        if len(last_distri_pool_ids) < lectures_pool_minimes:
            warn("menys de {} lectures de DISTRI , trobades {}".format(
                lectures_pool_minimes, len(last_distri_pool_ids)))
            res.too_few_distri_measures.append(polissa_id)
            return False
        last_distri_pool_measures = pool_obj.read(last_distri_pool_ids, ['name'])
        # Six most recent reading dates, newest first.
        last_distri_pool_dates = sorted([
            isodate(measure['name'])
            for measure in last_distri_pool_measures
        ], reverse=True)[:6]
        # Day gaps between consecutive readings.
        days = [(first - second).days for first, second in zip(
            last_distri_pool_dates, last_distri_pool_dates[1:])]
        # NOTE(review): min() flags only when ALL gaps exceed the
        # threshold — confirm intent (max() would flag any single gap).
        if min(days) > llindar_bimensual:
            warn("possible polissa amb lectures bimensuals {}", days)
            res.bimensual_distri_readings.append(polissa_id)
            return False
    return True
def generateRequestSummaries(self, root):
    """Append a 'SolicitudesRealizadas' section under *root*, with one
    'DatosSolicitudes' element per change group in self.canvis.

    Groups are keyed by (provincia, distribuidora, tipoCambio,
    tipoPunto, tipoTarifa); any group with a None key is skipped with a
    warning. No-op when there are no changes.
    """
    if not self.canvis:
        return
    solicitudes = self.element(root, 'SolicitudesRealizadas')
    for keys, canvi in sorted(self.canvis.iteritems()):
        provincia, distribuidora, tipoCambio, tipoPunto, tipoTarifa = keys
        if any(key is None for key in keys):
            warn("A key has a None value ({}):\n{}".format(
                keys, canvi.dump()))
            continue
        datos = self.element(solicitudes, 'DatosSolicitudes')
        # Province code is emitted with a '000' suffix appended.
        self.element(datos, 'Provincia', provincia + '000')
        self.element(datos, 'Distribuidor', distribuidora)
        self.element(datos, 'Comer_entrante', self.CodigoAgente)
        self.element(datos, 'Comer_saliente', '0')
        self.element(datos, 'TipoCambio', tipoCambio)
        self.element(datos, 'TipoPunto', tipoPunto)  # TODO
        self.element(datos, 'TarifaATR', self._fareCodes[tipoTarifa])
        self.element(datos, 'TotalSolicitudesEnviadas', canvi.get('sent', 0))
        self.element(datos, 'SolicitudesAnuladas', canvi.get('cancelled', 0))
        self.element(datos, 'Reposiciones', 0)  # TODO: not well defined
        self.element(datos, 'ClientesSalientes', canvi.get('dropouts', 0))
        self.element(datos, 'NumImpagados', 0)  # TODO: not well defined
        # Optional per-state detail sections.
        if 'pendents' in canvi:
            self.generatePendingDetails(datos, canvi.pendents)
        if 'accepted' in canvi:
            self.generateAcceptedDetails(datos, canvi.accepted)
        if 'rejected' in canvi:
            for rejected in canvi.rejected:
                self.generateRejectedDetails(datos, rejected)
        if 'activationPending' in canvi:
            self.generateActivationPendingDetails(datos, canvi.activationPending)
        if 'activated' in canvi:
            self.generateActivated(datos, canvi.activated)
def accept(datapath, back2BackCases, archSpecific=False, cases=None):
    """Promote the 'bad' (failing) outputs of back-to-back cases to be
    the new expected results.

    datapath       -- base directory holding the case data
    back2BackCases -- iterable of (case, command, outputs) triples
    archSpecific   -- store as architecture-specific expectations
    cases          -- optional list of case names to accept (default all)

    Unknown case names are reported with a warning at the end.
    """
    # BUG FIX (idiom): `cases=[]` was a mutable default argument;
    # normalise a None default instead (behavior unchanged for callers).
    cases = list(cases) if cases else []
    remainingCases = cases[:]
    for case, command, outputs in back2BackCases:
        if cases and case not in cases:
            continue
        if cases:
            remainingCases.remove(case)
        for output in outputs:
            extension = os.path.splitext(output)[-1]
            base = prefix(datapath, case, output)
            badResult = badResultName(base, extension)
            # Nothing to accept when no failing output exists.
            if not os.access(badResult, os.R_OK):
                continue
            warn("Accepting {}".format(badResult))
            if archSpecific:
                os.rename(badResult, expectedArchName(base, extension))
            else:
                os.rename(badResult, expectedName(base, extension))
    if remainingCases:
        warn("No such test cases: {}".format(
            ", ".join("'%s'" % case for case in remainingCases)))
def validate_draft_invoices(polissa, generated_invoice_ids):
    """Validate each freshly generated draft invoice of *polissa*.

    Returns True ("ko") when any invoice raises a validation warning
    other than the tolerated delayed-contract one, also recording the
    contract in result.contractsValidationError.

    NOTE(review): reverses *generated_invoice_ids* in place, mutating
    the caller's list — presumably to validate oldest first; confirm.
    """
    generated_invoice_ids.reverse()
    success("\tFactures generades: {}", generated_invoice_ids)
    step("\tValidem factures creades")
    ko = False
    for draft_invoice_id in generated_invoice_ids:
        step("\t - Validant factura {}", draft_invoice_id)
        validation_warnings = Validator.validate_invoice(draft_invoice_id)
        for validation_warning in validation_warnings:
            v_warning_text = warning.read(validation_warning, ['message', 'name'])
            # The delayed-contract warning is expected and tolerated.
            if v_warning_text['name'] != DELAYED_CONTRACT_WARNING_TEXT:
                ko = True  # validation error
                warn(" · {} {}",
                    (v_warning_text['name']).encode('utf-8'),
                    (v_warning_text['message']).encode('utf-8'))
    if ko:
        result.contractsValidationError.append(polissa.id)
    return ko
def runBack2BackProgram(datapath, argv, back2BackCases, help=help, extensions={}):
    """Command-line driver for the back-to-back test suite.

    Supported argv flags: --help, --arch, --list, --accept <cases...>,
    --acceptall; with no flag, all cases are run and a mismatch fails.

    NOTE(review): the `help=help` parameter shadows the builtin with the
    caller-supplied help text, and `extensions={}` is a mutable default
    (only read here).
    """
    # `X or fail(...)` / `X and fail(...)` is this module's idiom for an
    # assert-with-message.
    "--help" not in argv or fail(help, 0)
    architectureSpecific = "--arch" in argv
    if architectureSpecific:
        argv.remove("--arch")
    os.access(datapath, os.X_OK) or fail(
        "Datapath at '%s' not available. " % datapath +
        "Check the back 2 back script on information on how to obtain it.")
    availableCases = [case for case, command, outputs in back2BackCases]
    if "--list" in argv:
        sys.stdout.write("Available cases:\n")
        sys.stdout.write(_caseList(availableCases))
        sys.exit()
    if "--accept" in argv:
        # Everything after --accept is taken as a case name to accept.
        cases = argv[argv.index("--accept") + 1:]
        cases or fail(
            "Option --accept needs a set of cases to accept.\n"
            "Available cases:\n" +
            _caseList((case for case, command, outputs in back2BackCases))
        )
        unsupportedCases = set(cases).difference(set(availableCases))
        unsupportedCases and fail(
            "The following specified cases are not available:\n" +
            _caseList(unsupportedCases) +
            "Try with:\n" +
            _caseList(availableCases)
        )
        accept(datapath, back2BackCases, architectureSpecific, cases)
        sys.exit()
    if "--acceptall" in argv:
        warn("Accepting any faling case")
        accept(datapath, back2BackCases, architectureSpecific)
        sys.exit()
    passB2BTests(datapath, back2BackCases, extensions=extensions) or fail("Tests not passed")
def generateRequestSummaries(self, root):
    """Append a 'SolicitudesRealizadas' section under *root*, with one
    'DatosSolicitudes' element per change group in self.canvis.

    Groups are keyed by (provincia, distribuidora, tipoCambio,
    tipoPunto, tipoTarifa); any group with a None key is skipped with a
    warning. No-op when there are no changes.
    """
    if not self.canvis :
        return
    solicitudes = self.element(root, 'SolicitudesRealizadas')
    for keys, canvi in sorted(self.canvis.iteritems()):
        provincia, distribuidora, tipoCambio, tipoPunto, tipoTarifa = keys
        if any(key is None for key in keys):
            warn("A key has a None value ({}):\n{}".format(
                keys, canvi.dump()))
            continue
        datos = self.element(solicitudes, 'DatosSolicitudes')
        # Province code is emitted with a '000' suffix appended.
        self.element(datos, 'Provincia', provincia+'000')
        self.element(datos, 'Distribuidor', distribuidora)
        self.element(datos, 'Comer_entrante', self.CodigoAgente)
        self.element(datos, 'Comer_saliente', '0')
        self.element(datos, 'TipoCambio', tipoCambio)
        self.element(datos, 'TipoPunto', tipoPunto)  # TODO
        self.element(datos, 'TarifaATR', self._fareCodes[tipoTarifa])
        self.element(datos, 'TotalSolicitudesEnviadas', canvi.get('sent',0))
        self.element(datos, 'SolicitudesAnuladas', canvi.get('cancelled',0))
        self.element(datos, 'Reposiciones', 0)  # TODO: not well defined
        self.element(datos, 'ClientesSalientes', canvi.get('dropouts',0))
        self.element(datos, 'NumImpagados', 0)  # TODO: not well defined
        # Optional per-state detail sections.
        if 'pendents' in canvi :
            self.generatePendingDetails(datos, canvi.pendents)
        if 'accepted' in canvi :
            self.generateAcceptedDetails(datos, canvi.accepted)
        if 'rejected' in canvi :
            for rejected in canvi.rejected :
                self.generateRejectedDetails(datos, rejected)
        if 'activationPending' in canvi :
            self.generateActivationPendingDetails(datos, canvi.activationPending)
        if 'activated' in canvi :
            self.generateActivated(datos, canvi.activated)
def crea_contractes(uri, filename):
    """Create a contract (via the webforms endpoint) for every petition
    read from *filename*.

    Since webforms always answers 200, creation is verified afterwards
    by searching the petition's CUPS in the ERP.
    """
    O = OOOP_WST(**configdb.ooop)
    contract_petitions = read_contracts_data_csv(filename)
    for petition in contract_petitions:
        msg = "Creating contract for vat {}, soci {}, CUPS {}"
        step(msg, petition['contract_owner']['vat'],
            petition['member_number'], petition['cups'])
        try:
            status, reason, text = add_contract(uri, petition)
            # Manual check: webforms always returns 200, so confirm the
            # contract really exists by searching its CUPS in the ERP.
            if len(O.GiscedataPolissa.search([('cups', '=', petition['cups'])
                ])) == 0:
                raise requests.exceptions.HTTPError(
                    "Error en resposta del webforms")
        except requests.exceptions.HTTPError as e:
            msg = "I couldn\'t create a new contract for cups {}, reason {}"
            # 'cups exist' is an expected duplicate answer: warn only.
            if 'cups exist' in e.message:
                warn(msg, petition['cups'], e)
            else:
                error(msg, petition['cups'], e)
        # NOTE(review): this success message also prints after a caught
        # failure — confirm whether it belongs in an `else:` branch.
        success("S'ha creat un nou contracte pel CUPS {}".format(
            petition['cups']))
def delete_2001(test=True):
    """Delete imported F1 xmls with error code 2001 whose provider bill
    already exists (matched by invoice origin and bill dates).

    test -- when True, only the first five xmls are processed.

    Xmls that could not be safely deleted are summarised in a final
    warning.
    """
    step("I will start with 2001 erros (this will be an odyssey)")
    warn_xmls = []  # xmls left untouched, reported at the end
    importaciones_2001_f1 = get_f1s(error_code='2001')
    if test:
        warn("Test is ON!!")
        importaciones_2001_f1 = importaciones_2001_f1[:5]
    step("There are {} xml to delete".format(len(importaciones_2001_f1)))
    cnt = 0
    for xml_imported in importaciones_2001_f1:
        try:
            step("Checking xml {}".format(xml_imported.name))
            provider_bill = get_provider_bill_by_origin(
                xml_imported.invoice_number_text)
            if same_bill_dates(xml_imported, provider_bill):
                # The bill is already registered: the xml is redundant.
                msg = "Xml {} has already provider bill with origin {}, i will delete it"
                success(
                    msg.format(xml_imported.name,
                        xml_imported.invoice_number_text))
                xml_imported.unlink()
                cnt += 1
            else:
                msg = "Provider bill with origin {} doesn't match with xml"
                warn(msg.format(provider_bill.reference, xml_imported.id))
                warn_xmls.append(xml_imported)
        except Exception as e:
            # Typically: no provider bill found for that origin.
            msg = "I couldn\'t get provider bill with origin {}"
            warn(msg.format(xml_imported.invoice_number_text))
            warn_xmls.append(xml_imported)
    step("{} xmls deleted".format(cnt))
    if warn_xmls:
        msg = "- Fitxer: {}\n\t Id: {}\n\t CUPS: {}\n\t Distri: {}\n\t Origen: {}"
        warn("\n".join([
            msg.format(xml.name, xml.id, xml.cups_text, xml.distribuidora,
                xml.invoice_number_text) for xml in warn_xmls
        ]))
def debugCase(db, caseId, impersonate=False):
    """Collect all switching-case data for *caseId* into one ns tree:
    the case itself, its steps (each with its detail row), the related
    crm case and the polissa.

    impersonate -- when True, scrub personal data from step details and
                   from the polissa.
    """
    processIds = idsProcessos(db)
    processNames = {v: k for k, v in processIds.items()}
    with db.cursor() as cur:
        def query(aQuery, **vars):
            # Run a parameterised query, rows as a list of ns.
            cur.execute(aQuery, vars)
            return nsList(cur)
        def queryOne(aQuery, **vars):
            # Expect exactly one row; warn (but still index) otherwise.
            many = query(aQuery, **vars)
            # NOTE(review): `is not 1` relies on CPython small-int
            # caching; should be `!= 1`.
            if len(many) is not 1:
                warn("S'esperava un sol registre i s'han trobat {}\n"
                    "A la query:\n{}\nAmb:\n{}"
                    .format(len(many), aQuery, ns(vars).dump()))
            return many[0]
        def queryOneOrEmpty(aQuery, **vars):
            # Like queryOne, but an empty result yields an empty ns.
            many = query(aQuery, **vars)
            if not many:
                return ns()
            if len(many) is not 1:
                warn("S'esperava un sol registre o cap i s'han trobat {}\n"
                    "A la query:\n{}\nAmb:\n{}"
                    .format(len(many), aQuery, ns(vars).dump()))
            return many[0]
        case = queryOneOrEmpty(
            'SELECT * FROM giscedata_switching WHERE id=%(caseid)s',
            caseid=caseId,
        )
        case.process_name = processNames[case.proces_id]
        case.steps = query("""\
            SELECT * FROM giscedata_switching_step_header
            WHERE sw_id=%(caseid)s
            ORDER BY create_date
            """,
            caseid=caseId,
        )
        # NOTE(review): the loop variable `step` shadows the module-wide
        # step() logger inside this loop.
        for step in case.steps:
            # Each process has its own candidate step tables; probe each.
            maybeSteps = idsPasses(db, processNames[case.proces_id])
            step.details = []
            for maybeStepName, maybeStepId in maybeSteps.items():
                details = query("""\
                    SELECT * FROM giscedata_switching_{}
                    WHERE header_id=%(id)s
                    """.format(maybeStepName.lower()),
                    id=step.id
                )
                if not details:
                    continue
                if impersonate:
                    for detail in details:
                        impersonatePersonalData(detail)
                step._stepname = maybeStepName
                step.details += details
            if not step.details:
                warn("No s'han trobat detalls del pas")
            if len(step.details) > 1:
                warn("Més d'un detall pel pas")
            # NOTE(review): collapses the list to its first element —
            # raises IndexError when no details were found; confirm.
            step.details = step.details[0]
        case.case = queryOne("""\
            SELECT * FROM crm_case
            WHERE id=%(crm)s
            """,
            crm=case.case_id,
        )
        case.polissa = queryOne("""\
            SELECT * FROM giscedata_polissa
            WHERE id=%(polissaid)s
            """,
            polissaid=case.cups_polissa_id,
        )
        if impersonate:
            impersonatePersonalData(case.polissa)
            case.polissa.observacions = 'bla bla bla'
        return case
'Recommended shares', 'Covered use', 'Recommended investment', 'Already invested', 'Unknown use', 'Small use', 'Is Partner', ]) for line in dbutils.fetchNs(cursor) : try: totalUse = line.consumannual if totalUse is None: warn("Soci {} amb consum null".format( line.nsoci)) totalUse = 0 # continue if totalUse * recommendedPercent < shareUse * 100 : error("El soci {} no te prou consum ({})".format(line.nsoci, totalUse)) # continue if line.nif[:2] != 'ES': warn("Soci amb un VAT code no espanyol: {}".format(line.nif[:2])) recommendedShares = (totalUse*recommendedPercent/100) // shareUse recommendedInvestment = recommendedShares * shareCost print '\t'.join( str(x)
annual_use_kwh = cups['conany_kwh'], ) if cups else dict() for contracte, cups in ( (contracte, O.GiscedataCupsPs.read(contracte['cups'][0], ['conany_kwh'])) if contracte['cups'] else (contracte, None) for contracte in O.GiscedataPolissa.read( contractes, ['name','cups','cups_direccio']) ) ] if not len(consums) : error("El soci {} no te contractes".format(soci_id)) continue if any('annual_use_kwh' not in contract for contract in consums) : warn("El soci {} te un contracte sense consum anual calculat".format(soci_id)) continue shareUse = 170 recommendedPercent = 70 shareCost = 100 def ambPuntDeMilers(numero) : return '{:,}'.format(numero).replace(',','.') totalUse = sum((contract.get('annual_use_kwh',0) for contract in consums)) recommendedShares = (totalUse*recommendedPercent/100) // shareUse recommendedInvestment = recommendedShares * shareCost if totalUse < shareUse : error("El soci {} no te prou consum ({})".format(soci_id, totalUse))