def duplicateds_by_cups(duplicateds, mongo_db, mongo_collection, doit=False):
    total_deleted = 0
    count_different_values = 0
    entry_example = False
    for entry in duplicateds:
        try:
            if 'name' not in entry['_id']:
                continue
            to_delete = not is_winter_hour_change(entry['_id']['datetime'])
            if to_delete:
                cr = list(mongo_db[mongo_collection].find(
                    {'_id': {'$in': entry['uniqueIds']}}))
                informed_ai = [x['ai'] for x in cr if 'ai' in x]
                if len(informed_ai) != len(cr):
                    warn("Element/s sense ai", cr)
                    continue
                if len(set(informed_ai)) == 1:
                    total_deleted += len(cr) - 1
                    if doit:
                        del_res = mongo_db[mongo_collection].delete_many(
                            {'_id': {'$in': entry['uniqueIds'][1:]}})
                else:
                    count_different_values += 1
                    entry_example = entry['_id']
                    #warn("Repetits amb diferents valors ai ", cr)
        except KeyboardInterrupt as e:
            break
        except Exception as e:
            error("Error: {}".format(e))
    if count_different_values:
        warn(
            "Trobats {} repetits amb diferents valors, "
            "per exemple CUPS {}, diahora {}".format(
                count_different_values,
                entry_example['name'],
                entry_example['datetime'],
            )
        )
    return total_deleted

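# A minimal usage sketch (not part of the original script): it assumes a local
# MongoDB with readings keyed by 'name' (CUPS) and 'datetime', and builds the
# 'duplicateds' structure the function above expects ({'_id': {'name', 'datetime'},
# 'uniqueIds': [...]}) with an aggregation that groups repeated readings.
# Database and collection names are assumptions, not taken from the source.
import pymongo

def find_duplicated_readings(mongo_db, mongo_collection):
    pipeline = [
        {'$group': {
            '_id': {'name': '$name', 'datetime': '$datetime'},
            'uniqueIds': {'$addToSet': '$_id'},
            'count': {'$sum': 1},
        }},
        {'$match': {'count': {'$gt': 1}}},
    ]
    return mongo_db[mongo_collection].aggregate(pipeline, allowDiskUse=True)

# Example (dry run, nothing is deleted unless doit=True):
# client = pymongo.MongoClient()
# duplicateds = find_duplicated_readings(client['somenergia'], 'tm_profile')
# deleted = duplicateds_by_cups(duplicateds, client['somenergia'], 'tm_profile')
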
def setPlantParameters(plantname, force=False, **data):
    step(f"Setting parameters for plant '{plantname}'")
    with orm.db_session:
        plant = Plant.get(name=plantname)
        if not plant:
            error(f"Plant '{plantname}' not found")
            return
        if PlantModuleParameters.exists(plant=plant.id):
            if not force:
                error(f"Plant '{plant.name}' already has parameters, use --force to overwrite")
                return
            warn("Forcing removal of previous plant parameters")
            oldparams = PlantModuleParameters.get(plant=plant.id)
            out(ns(oldparams.as_dict()).dump())
            oldparams.delete()
        data = ns(data)
        params = PlantModuleParameters(
            plant=plant,
            n_modules=data.nModules,
            max_power_current_ma=int(data.Imp * 1000),
            max_power_voltage_mv=int(data.Vmp * 1000),
            current_temperature_coefficient_mpercent_c=int(data.temperatureCoefficientI * 1000),
            voltage_temperature_coefficient_mpercent_c=int(data.temperatureCoefficientV * 1000),
            standard_conditions_irradiation_w_m2=int(data.irradiationSTC),
            standard_conditions_temperature_dc=int(data.temperatureSTC * 10),
            degradation_cpercent=int(data.degradation * 100),
            opencircuit_voltage_mv=int(data.Voc * 1000),
            shortcircuit_current_ma=int(data.Isc * 1000),
            expected_power_correction_factor_cpercent=int(data.get('correctionFactorPercent', 100) * 100),
        )

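# A minimal invocation sketch (plant name and values are illustrative, not from
# the original source): it shows the keyword arguments setPlantParameters expects
# and the unit conversions it applies (A -> mA, V -> mV, %/C -> m%/C, % -> c%).
# setPlantParameters(
#     'Alcolea',
#     force=False,
#     nModules=4878,
#     Imp=8.27,                      # A, stored as 8270 mA
#     Vmp=30.2,                      # V, stored as 30200 mV
#     temperatureCoefficientI=0.05,  # %/C, stored as 50 m%/C
#     temperatureCoefficientV=-0.31,
#     irradiationSTC=1000,           # W/m2
#     temperatureSTC=25,             # C, stored as 250 dC
#     degradation=0.4,               # %, stored as 40 c%
#     Voc=37.8,
#     Isc=8.75,
#     correctionFactorPercent=90,
# )
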
def archive_members_from_list(list_name, email_list):
    list_id = get_mailchimp_list_id(list_name)
    operations = []
    for email in email_list:
        operation = {
            "method": "DELETE",
            "path": "/lists/{list_id}/members/{subscriber_hash}".format(
                list_id=list_id,
                subscriber_hash=get_subscriber_hash(email)),
            "operation_id": email,
        }
        operations.append(operation)
    payload = {"operations": operations}
    try:
        response = MAILCHIMP_CLIENT.batches.start(payload)
    except ApiClientError as e:
        msg = "An error occurred in the archiving batch request, reason: {}"
        error(msg.format(e.text))
    else:
        batch_id = response['id']
        while response['status'] != 'finished':
            time.sleep(2)
            response = MAILCHIMP_CLIENT.batches.status(batch_id)
        step("Archive operation finished!!")
        step(
            "Total operations: {}, finished operations: {}, errored operations: {}"
            .format(response['total_operations'],
                    response['finished_operations'],
                    response['errored_operations']))
        result_summary = requests.get(response['response_body_url'])
        result_summary.raise_for_status()
        return result_summary.content

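# A minimal usage sketch (list name, addresses and output file are assumptions,
# not from the original source): archives a handful of addresses from a
# Mailchimp audience and stores the batch result summary the function returns.
# emails_to_archive = [
#     'person1@example.com',
#     'person2@example.com',
# ]
# summary = archive_members_from_list('Butlleti', emails_to_archive)
# with open('archive_result.json', 'wb') as f:
#     f.write(summary)
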
def diff_files(expected, result, diffbase):
    if not os.access(result, os.R_OK):
        error("Result file not found: {}".format(result))
        return ["Result was not generated: '%s'" % result]
    if not os.access(expected, os.R_OK):
        error("Expectation file not found for: {}".format(result))
        return ["No expectation for the output. First run? "
                "Check the results and accept them with the --accept option."]
    return diffbyextension(expected, result, diffbase)

def get_polissa_id_from_polissa_name(polissas_file):
    polissas_names = []
    polissas_id = []
    with open(polissas_file, 'r') as f:
        polissas_names = f.readlines()
    polissas_names = [x.strip() for x in polissas_names]
    for pol in polissas_names:
        try:
            polissa_id = O.GiscedataPolissa.search([('name', '=', pol)])[0]
            polissas_id.append(polissa_id)
        except Exception as e:
            error("ERROR la polissa {} no ha estat trobada a l'ERP", pol)
            error(unicode(e))
    return polissas_id

def set_general_condition(polissa, sem):
    step("Setting general conditions for contract: {}".format(polissa.name))
    conditions = get_general_condition(polissa.titular.lang)
    with sem:
        try:
            polissa.condicions_generals_id = conditions
            res = True, polissa.name
        except Exception as e:
            msg = "An error occurred setting general conditions for "\
                "contract {}, reason: {}"
            error(msg.format(polissa.name, str(e)))
            res = False, polissa.name
    return res

def copy_f1_to_testing(csv_file, date_from, polissa_name, server):
    client_prod = Client(**configdb.erppeek)
    if server == 'perp01':
        client_test = Client(**configdb.erppeek_perp01)
    else:
        client_test = Client(**configdb.erppeek_testing)
    if not polissa_name:
        polissa_names = read_polissa_names(csv_file, client_prod)
    else:
        polissa_names = [polissa_name]
    polissa_ids = search_polissa_by_names(polissa_names, client_prod)
    info = []
    total_pols_ok = 0
    for pol_info in client_prod.GiscedataPolissa.read(polissa_ids, ['name']):
        pol_id = pol_info['id']
        pol_name = pol_info['name']
        try:
            f1_prod_info = get_f1_info(client=client_prod, pol_id=pol_id)
            f1_test_info = get_f1_info(client=client_test, pol_id=pol_id)
            to_import_f1, to_replace_att = get_f1_import_replace_att(
                f1_prod_info, f1_test_info, date_from)
            if len(f1_prod_info) < len(f1_test_info) + len(to_import_f1):
                txt = "hi ha algun F1 a testing que no hi és a real. No s'hi actua"
                error("Per la pòlissa {} {}".format(pol_name, txt))
                info.append({'pol': pol_name, 'info': txt})
                continue
            replace_att(to_replace_att, client_prod, client_test)
            import_f1_to_testing(to_import_f1, client_prod, client_test)
            txt = "importats {} F1 i {} adjunts actualitzats".format(
                len(to_import_f1), len(to_replace_att))
            step("Per la pòlissa {} {}".format(pol_name, txt))
            info.append({'pol': pol_name, 'info': txt})
            total_pols_ok += 1
        except Exception as e:
            error("Error en la pòlissa {}".format(pol_name))
            info.append({'pol': pol_name, 'info': "error inesperat"})
    success("S'ha encuat la importació dels fitxers de {} pòlisses".format(
        total_pols_ok))
    return info

def validate_moves(draft_move_ids_list):
    '''
    draft_move_ids_list: Iterable of lists of move ids to validate
    '''
    res = {}
    sem = Semaphore()
    with futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        # Result keys are tuples: a list is not hashable and cannot key the dict
        to_do = {
            executor.submit(validate_account_move, move_ids, sem): tuple(move_ids)
            for move_ids in draft_move_ids_list
        }
        for task in futures.as_completed(to_do):
            try:
                res[to_do[task]] = task.result()
            except Exception as e:
                msg = "An error occurred validating account moves with "\
                    "ids {}, reason: {}"
                error(msg.format(to_do[task], str(e)))
                res[to_do[task]] = False
    return res

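# A minimal usage sketch (chunk size and move ids are assumptions, not from the
# original source): draft account moves are validated in fixed-size chunks so
# each worker confirms one batch at a time; result keys are tuples of move ids.
def chunked(ids, size=50):
    # Yield successive fixed-size lists of ids.
    for i in range(0, len(ids), size):
        yield ids[i:i + size]

# draft_move_ids = [101, 102, 103]  # illustrative ids of draft account moves
# results = validate_moves(chunked(draft_move_ids, size=50))
# failed = [ids for ids, ok in results.items() if not ok]
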
def process_records(erp_data, pfilename):
    '''
    Dumps the incoherent records retrieved from the ERP into a dated CSV file.
    '''
    erp_data = [dict(data) for data in erp_data]
    filename = pfilename + datetime.now().strftime("%Y%m%d") + '.csv'
    if erp_data:
        error("\tThere are {} incoherent records in {}",
              len(erp_data), datetime.now().strftime("%Y-%m-%d"))
        error("\tSaving incoherent data in {}", filename)
        header = {key for d in erp_data for key in d.keys()}
        with open(filename, "w") as loadsocis:
            writer = csv.DictWriter(loadsocis, header)
            writer.writeheader()
            writer.writerows(erp_data)
    else:
        success(
            "\tPerfect! There is nothing to do! No incoherent records found in {}",
            datetime.now().strftime("%Y-%m-%d"))
    return erp_data, filename

def update_general_conditions():
    res = []
    sem = Semaphore()
    polissa_list = Polissa.browse(
        [('condicions_generals_id', '=', False)],
        0, 0, False, {'active_test': False}
    )
    step("There are {} polissas to update".format(len(polissa_list)))
    with futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        to_do = [
            executor.submit(set_general_condition, polissa, sem)
            for polissa in polissa_list
        ]
        for task in futures.as_completed(to_do):
            try:
                res.append(task.result())
            except Exception as e:
                msg = "An error occurred in task {}, reason: {}"
                error(msg.format(task, str(e)))
    return res

def crea_contractes(uri, filename):
    O = OOOP_WST(**configdb.ooop)
    contract_petitions = read_contracts_data_csv(filename)
    for petition in contract_petitions:
        msg = "Creating contract for vat {}, soci {}, CUPS {}"
        step(msg, petition['contract_owner']['vat'],
             petition['member_number'], petition['cups'])
        try:
            status, reason, text = add_contract(uri, petition)
            # Manual check, since the webforms endpoint always returns 200
            if len(O.GiscedataPolissa.search(
                    [('cups', '=', petition['cups'])])) == 0:
                raise requests.exceptions.HTTPError(
                    "Error en resposta del webforms")
        except requests.exceptions.HTTPError as e:
            msg = "I couldn't create a new contract for cups {}, reason {}"
            if 'cups exist' in e.message:
                warn(msg, petition['cups'], e)
            else:
                error(msg, petition['cups'], e)
            continue
        success("S'ha creat un nou contracte pel CUPS {}".format(
            petition['cups']))

def __init__(self, documentName, credentialFilename):
    from oauth2client.service_account import ServiceAccountCredentials
    try:
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            credentialFilename,
            scopes=[
                'https://spreadsheets.google.com/feeds',
                'https://www.googleapis.com/auth/drive',
            ],
        )
    except Exception as e:
        fail(str(e))
    gc = gspread.authorize(credentials)
    try:
        self.doc = gc.open(documentName)
    except Exception as e:
        credentialContent = json.load(io.open(credentialFilename))
        error("No s'ha trobat el document, o no li has donat permisos a l'aplicacio")
        error("Cal compartir el document '{}' amb el següent correu: {}"
              .format(documentName, credentialContent['client_email']))
        error(str(e))
        sys.exit(-1)

def __init__(self, documentName, credentialFilename):
    from oauth2client.service_account import ServiceAccountCredentials
    try:
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            credentialFilename,
            scopes=[
                'https://spreadsheets.google.com/feeds',
            ],
        )
    except Exception as e:
        fail(str(e))
    gc = gspread.authorize(credentials)
    try:
        self.doc = gc.open(documentName)
    except Exception as e:
        credentialContent = json.load(io.open(credentialFilename))
        error(
            "No s'ha trobat el document, o no li has donat permisos a l'aplicacio"
        )
        error(
            "Cal compartir el document '{}' amb el següent correu: {}".format(
                documentName, credentialContent['client_email']))
        error(str(e))
        sys.exit(-1)

        p['name'], potencia_activa, potencia)


def get_dades_from_csv():
    with open('dades_id_polissa_potencia_dist.csv') as csv_file:
        data = []
        for row in csv_file:
            data_line = row.rstrip().split('\t')
            data.append(data_line)
    return data


def main():
    step("Get all potenciadist from {} drive", config.document['filename'])
    get_id_polissa_potencia_dist_from_drive()
    data = get_dades_from_csv()
    update_dades_erp(data)


if __name__ == '__main__':
    try:
        config = ns.load("configdoc.yaml")
    except:
        error("Check configdoc.yaml")
        raise
    main()

start_date = args['startdate']
end_date = args['enddate']
contract_name = args['contractname']


def valid_date(date_text):
    try:
        datetime.strptime(date_text, '%Y-%m-%d')
    except ValueError:
        raise ValueError("Incorrect data format, should be YYYY-MM-DD")
    return True


O = None
try:
    O = OOOP(**configdb.ooop)
except:
    error("Unable to connect to ERP")
    raise

if not contract_name:
    error("Contract name missing")
    raise ValueError("Contract name missing")

start_date = start_date if start_date and valid_date(start_date) else None
end_date = end_date if end_date and valid_date(end_date) else None

contract_id = O.GiscedataPolissa.search([('name', '=', contract_name)])[0]
quarantine = {'kWh': [], 'euro': []}
old_measures = get_measures_by_contract(O, contract_id, range(1, 12))
new_measures = load_new_measures(O, contract_id)

    '--from-date',
    dest='from_date',
    required=True,
    help="Data d'inici del consum a partir de la qual es busquen les factures",
)
parser.add_argument(
    '--to-date',
    dest='to_date',
    required=True,
    help="Data fi del consum a partir de la qual es busquen les factures",
)
parser.add_argument(
    '--output',
    dest='output',
    type=str,
    help="Output csv file",
)
args = parser.parse_args()

try:
    main(args.from_date, args.to_date, args.output)
except (KeyboardInterrupt, SystemExit, SystemError):
    warn("Aarrggghh you kill me :(")
except Exception as e:
    traceback.print_exc(file=sys.stdout)
    error("El proces no ha finalitzat correctament: {}", str(e))
else:
    success("Chao!")

for pol_id in pol_ids:
    n += 1
    try:
        pol_read = pol_obj.read(pol_id, [
            'name',
            'data_alta',
            'data_ultima_lectura',
            'comptadors',
            'modcontractuals_ids',
            'tarifa',
            'distribuidora',
            'cups',
        ])
    except Exception as e:
        error(unicode(e))
        break
    data_alta = pol_read['data_alta']
    bigstep("{}/{}".format(n, total))
    smallstep("Polissa {}".format(pol_read['name']))
    smallstep("CUPS: {}".format(pol_read['cups'][1]))
    # smallstep("Distribuidora: {}".format(pol_read['distribuidora'][1]))  # disabled: ascii encoding problems
    try:
        if es_cefaco(pol_id):
            warn("Ja està detectada com a Reclamacio de Distribuidora")
            res.cefaco.append(pol_id)
            continue  # next polissa
        cx06_ids = sw_obj.search([
            ('cups_id', '=', pol_read['cups'][0]),

def main(list_name, output):
    result = archieve_members_in_list(list_name.strip())
    with open(output, 'w') as f:
        f.write(result)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Archivieren Sie E-Mails in großen Mengen')
    parser.add_argument('--list',
                        dest='list_name',
                        required=True,
                        help="nom de la llista de mailchimp")
    parser.add_argument('--output',
                        dest='output',
                        required=True,
                        help="Fitxer de sortida amb els resultats")
    args = parser.parse_args()
    try:
        main(args.list_name, args.output)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error("El proceso no ha finalizado correctamente: {}", str(e))
    else:
        success("Script finalizado")

def canvi_titus(O, new_owners, create_case=False):
    for new_client in new_owners:
        try:
            cups = new_client.get('CUPS', '').strip().upper()
            msg = "Creating new profile of {}, dni: {}"
            step(msg.format(new_client['Nom nou titu'],
                            new_client['DNI'].strip().upper()))
            msg = "Getting address information of cups {}"
            step(msg.format(cups))
            cups_address = get_cups_address(O, cups)
            contract_info = get_contract_info(O, new_client.get('Contracte', ''))
            old_owner_vat = O.ResPartner.read(
                contract_info.titular[0], ['vat'])['vat']
            with transaction(O) as t:
                profile_data = create_fitxa_client(
                    t,
                    full_name=new_client['Nom nou titu'].strip(),
                    vat='ES{}'.format(new_client['DNI'].strip().upper()),
                    lang=LANG_TABLE.get(new_client['Idioma'].strip().upper(), 'es_ES'),
                    email=new_client['Mail'].strip(),
                    phone=new_client['Tlf'].strip(),
                    street=cups_address['street'],
                    postal_code=cups_address['dp'] or '',
                    city_id=cups_address['id_municipi'],
                    state_id=cups_address['id_state'],
                    country_id=cups_address['id_country'],
                    iban=sanitize_iban(new_client['IBAN']))
                member_id = get_memberid_by_partner(t, profile_data.client_id)
                if create_case:
                    msg = "Creating change owner M1(T) atr case {} -> {}"
                    step(msg.format(old_owner_vat,
                                    new_client['DNI'].strip().upper()))
                    changeowner_res = create_m1_chageowner(
                        t,
                        contract_number=new_client['Contracte'],
                        cups=cups,
                        new_owner_vat='ES{}'.format(
                            new_client['DNI'].strip().upper()),
                        new_owner_id=profile_data.client_id,
                        old_owner_id=contract_info.titular,
                        member_id=profile_data.client_id if member_id else False,
                        address_id=profile_data.address_id,
                        notification_address_id=profile_data.address_id,
                        bank_id=profile_data.bank_id,
                        signature_date=sanitize_date(new_client['Data']),
                        cnae_id=contract_info.cnae[0],
                        owner_change_type='T',
                        lang=LANG_TABLE.get(
                            new_client['Idioma'].strip().upper(), 'es_ES'),
                        other_payer=False)
                    t.GiscedataSwitching.write(changeowner_res[2], dict(state='draft'))
                msg = "Setting as not 'estimable' and updating observations "\
                    "to contract: {}"
                step(msg.format(new_client['Contracte']))
                update_old_contract_information(
                    t,
                    contract_number=new_client['Contracte'],
                    cups=cups,
                    new_owner_id=profile_data.client_id,
                    new_bank_id=profile_data.bank_id,
                    member_id=member_id,
                    request_date=new_client['Data'])
        except xmlrpclib.Fault as e:
            msg = "An error occurred creating {}, dni: {}, contract: {}. Reason: {}"
            error(msg.format(new_client['Nom nou titu'], new_client['DNI'],
                             new_client['Contracte'],
                             e.faultString.split('\n')[-2]))
        except Exception as e:
            msg = "An error occurred creating {}, dni: {}, contract: {}. Reason: {}"
            error(msg.format(new_client['Nom nou titu'], new_client['DNI'],
                             new_client['Contracte'],
                             e.message.encode('utf8')))
        else:
            result = profile_data.deepcopy()
            if create_case:
                contract_id = get_last_contract_on_cups(O, cups)
                result['case_id'] = changeowner_res[2]
                result['new_contract_id'] = contract_id
            result['cups'] = cups
            msg = "M1 ATR case successfully created with data:\n {}"
            success(msg.format(json.dumps(result, indent=4, sort_keys=True)))

    '''
    Function taken from: webforms/webforms/model.py --> new_soci()
    Returns the Somenergia soci, or creates it if it does not exist.
    '''
    soci_ids = t.SomenergiaSoci.search([
        ('partner_id', '=', partner_id),
    ])
    if soci_ids:
        return soci_ids[0]
    return t.SomenergiaSoci.create_one_soci(partner_id)


if __name__ == '__main__':
    step('Loading config file...')
    try:
        config = ns.load("config.yaml")
    except:
        error("Check config.yaml")
        raise

    step("Find and create 'socis' with no record in somenergia_soci")
    find_and_fix_soci_record(config.query_no_record_socis.sql,
                             config.query_no_record_socis.output)

    step("Get socis considering: {}", config.queryfile1.sql)
    migrate_socis(config, config.queryfile1.sql, config.queryfile1.output)

    step("Migration completed!")

def avancar_polissa(polissa, counter, sem, result):
    with sem:
        polissa = ns(polissa)
        polissa.id_ultima_lectura = Measures.search([
            ('comptador', 'in', polissa.comptadors),
        ])
        polissa.id_ultima_lectura_pot = MeasuresPot.search([
            ('comptador', 'in', polissa.comptadors),
        ])
        success("")
        success(SEPPARATOR)
        success(
            "{}/{} polissa: {} id: {} data ultima lectura: {} CUPS: {} Tarifa: {} lot facturacio {}",
            counter + 1,
            polissaEndarrerida_ids_len,
            polissa.name,
            polissa.id,
            polissa.data_ultima_lectura,
            polissa.cups[1],
            polissa.tarifa[1],
            getStr(polissa.lot_facturacio, 1, 'cap'),
        )
        success(SEPPARATOR)
        hasValidationError = False
        previous_draft_invoices = exist_draft_invoices_polissa(polissa)
        try:
            has_different_dates = checkDateLecturaPolissa(polissa)
        except Exception as e:
            has_different_dates = True
            # general execution error
            result.contractsCrashed.append(polissa.id)
            error("ERROR check data lectura polissa")
            error(unicode(e))
        if has_different_dates:
            result.contractsWithError.append(polissa.id)
            if hasDraftABInvoice(polissa):
                result.contractsWithWrongDataLecturaWithAB.append(polissa.id)
            else:
                result.contractsWithWrongDataLecturaWithoutAB.append(polissa.id)
        else:
            try:
                error_generating_inv = generate_draft_invoices_polissa(polissa)
            except Exception as e:
                error_generating_inv = True
            if error_generating_inv:
                result.contractsWithError.append(polissa.id)
                if hasDraftABInvoice(polissa):
                    result.contractsStrangedAndABDraft.append(polissa.id)
                else:
                    result.contractsStrangedAndNoABDraft.append(polissa.id)
                # undoing the generated invoices
                draft_invoice_ids = get_draft_invoices_from_polissa(polissa)
                generated_invoice_ids = get_generated_invoices_ids(
                    previous_draft_invoices, draft_invoice_ids)
                undoDraftInvoicesAndMeasures(polissa, generated_invoice_ids)
            else:
                try:
                    draft_invoice_ids = get_draft_invoices_from_polissa(polissa)
                    generated_invoice_ids = get_generated_invoices_ids(
                        previous_draft_invoices, draft_invoice_ids)
                    validation_error = validate_draft_invoices(
                        polissa, generated_invoice_ids)
                except Exception as e:
                    validation_error = True
                if validation_error:
                    result.contractsWithError.append(polissa.id)
                    if hasDraftABInvoice(polissa):
                        result.contractsWithValidationErrorAndABDraft.append(polissa.id)
                    else:
                        result.contractsWithValidationErrorAndNoABDraft.append(polissa.id)
                    # undoing the generated invoices
                    undoDraftInvoicesAndMeasures(polissa, generated_invoice_ids)
                else:
                    if not hasDraftABInvoice(polissa):
                        result.contracsWithoutAB.append(polissa.id)
                    else:
                        rectified = get_diff_ab_fe_resultat(polissa)
                        if rectified >= 0:
                            result.contractsWithABResultPositive.append(polissa.id)
                        else:
                            result.contractsWithABResultNegative.append(polissa.id)
                    step("\tAnotem la polissa com a cas ok")
                    result.contractsForwarded.append(polissa.id)
                    draft_invoice_ids = get_draft_invoices_from_polissa(polissa)
                    if not doit:
                        generated_invoice_ids = get_generated_invoices_ids(
                            previous_draft_invoices, draft_invoice_ids)
                        undoDraftInvoicesAndMeasures(polissa, generated_invoice_ids)
                    else:
                        send_mail_open_send_invoices(draft_invoice_ids, polissa)
    # if not direct:
    #     warn("prem entrar per avançar el següent contracte")
    #     ignoreme = raw_input("")
    return counter, result

        'Is Partner',
    ])
    for line in dbutils.fetchNs(cursor):
        try:
            totalUse = line.consumannual
            if totalUse is None:
                warn("Soci {} amb consum null".format(line.nsoci))
                totalUse = 0
                # continue
            if totalUse * recommendedPercent < shareUse * 100:
                error("El soci {} no te prou consum ({})".format(
                    line.nsoci, totalUse))
                # continue
            if line.nif[:2] != 'ES':
                warn("Soci amb un VAT code no espanyol: {}".format(line.nif[:2]))
            recommendedShares = (totalUse * recommendedPercent / 100) // shareUse
            recommendedInvestment = recommendedShares * shareCost
            print '\t'.join(
                str(x)
                .replace('\t', ' ')
                .replace('\n', ' ')
                .replace('\r', ' ')
                for x in [
                    line.soci_id,

def fix_incoherent_cadastral_ref():
    '''
    Checks for incorrect cadastral reference format: in case of
    incorrect format, update it to a blank value.
    '''
    erp_client = Client(**configdb.erppeek)
    gcp_obj = erp_client.GiscedataCupsPs
    id_cups_list = gcp_obj.search([("active", "=", True)])
    n = 0
    for id_cups in id_cups_list:
        cadastral_ref = gcp_obj.read(id_cups, ["ref_catastral"])["ref_catastral"]
        try:
            evalrefcatastral.validate(cadastral_ref)
        except:
            if cadastral_ref:
                gcp_obj.write(id_cups, {"ref_catastral": ""})
                n += 1
    success("{} incoherent cadastral references fixed", n)


if __name__ == '__main__':
    step("Find if cadastral reference has the proper format")
    try:
        fix_incoherent_cadastral_ref()
    except Exception as e:
        error("Something went wrong {}: check fix_ref_catastral.py", e)

#!/usr/bin/env python
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from yamlns import namespace as ns
from consolemsg import step, error
import os
import sys


def local(filename):
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)


step("Loading schema")
schema = ns.load(local("../peerdescriptor-schema.yaml"))

for yamlfile in sys.argv[1:]:
    step("Validating {}", yamlfile)
    try:
        validate(ns.load(yamlfile), schema)
    except ValidationError as e:
        error(
            "Validation error at {filename}#/{path}:\n"
            "{msg}",
            filename=yamlfile,
            path='/'.join(format(x) for x in e.path),
            msg=e.message,
        )

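# A usage note (the script and file names below are assumptions, not from the
# original source): the peer descriptor YAML files to check are passed as
# positional arguments, e.g.
#
#     python validate_peers.py peers/somenergia.yaml peers/newpeer.yaml
#
# Each file is validated against ../peerdescriptor-schema.yaml and any
# ValidationError is reported with its path inside the document.
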
    # Only one meter
    if len(comp_ids) == 1:
        warn("Nomes te un comptador")
        res.un_comptador.append(pol_id)
        continue
    # TODO: check that there is at least one meter

    # Detect the deactivated meters
    comp_baixa_ids = comp_obj.search([
        ('id', 'in', comp_ids),
        ('active', '=', False),
    ])
    # No deactivated meter (should never happen with this error typology)
    if not comp_baixa_ids:
        error("No te comptadors de baixa")
        res.sense_comptador_baixa.append(pol_id)
        continue
    # Find which meter has no 'data de baixa' (if there is more than one,
    # re-running the script several times will eventually fix it)
    for comp_baixa_id in comp_baixa_ids:
        comp_baixa_read = comp_obj.read(
            comp_baixa_id, ['active', 'name', 'data_baixa'])
        data_baixa = comp_baixa_read['data_baixa']
        lecturesFacturacio_ids = lectF_obj.search([
            ('name', '=', data_baixa),
            ('comptador', '=', comp_baixa_id),
        ])
        if not lecturesFacturacio_ids:
            warn("Comptador sense lectura de tancament: {}".format(
                comp_baixa_read['name']))
            break
    else:

def fail(msg, *args, **params):
    error(msg, *args, **params)
    exit(-1)

    consums = [
        dict(
            contract_id=contracte['name'],
            supply_address=contracte['cups_direccio'],
            annual_use_kwh=cups['conany_kwh'],
        ) if cups else dict()
        for contracte, cups in (
            (contracte, O.GiscedataCupsPs.read(contracte['cups'][0], ['conany_kwh']))
            if contracte['cups'] else (contracte, None)
            for contracte in O.GiscedataPolissa.read(
                contractes, ['name', 'cups', 'cups_direccio'])
        )
    ]
    if not len(consums):
        error("El soci {} no te contractes".format(soci_id))
        continue
    if any('annual_use_kwh' not in contract for contract in consums):
        warn("El soci {} te un contracte sense consum anual calculat".format(soci_id))
        continue

    shareUse = 170
    recommendedPercent = 70
    shareCost = 100

    def ambPuntDeMilers(numero):
        return '{:,}'.format(numero).replace(',', '.')

    totalUse = sum(contract.get('annual_use_kwh', 0) for contract in consums)
    recommendedShares = (totalUse * recommendedPercent / 100) // shareUse

        'Unknown use',
        'Small use',
        'Is Partner',
    ]))
    for line in dbutils.fetchNs(cursor):
        try:
            totalUse = line.consumannual
            if totalUse is None:
                warn("Soci {} amb consum null".format(line.nsoci))
                totalUse = 0
                # continue
            if totalUse * recommendedPercent < shareUse * 100:
                error("El soci {} no te prou consum ({})".format(
                    line.nsoci, totalUse))
                # continue
            if line.nif[:2] != 'ES':
                warn("Soci amb un VAT code no espanyol: {}".format(
                    line.nif[:2]))
            recommendedShares = (totalUse * recommendedPercent / 100) // shareUse
            recommendedInvestment = recommendedShares * shareCost
            output('\t'.join(
                u(x).replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
                for x in [
                    line.soci_id,
                    line.name,

success("Employee created: {}", empleat_data['name']) except Exception as e: msg = "I couldn\'t create a new empoyee {}, reason {}" warn(msg, name, e) def main(csv_file): impd = ImportData() impd.create_employees(csv_file) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Custom import Employees from csv Script') parser.add_argument('--file', dest='csv_file', required=True, help="csv with employees data") args = parser.parse_args() try: main(args.csv_file) except Exception as e: traceback.print_exc(file=sys.stdout) error("Process failed: {}", str(e)) else: success("Finished!") # vim: et ts=4 sw=4
    uri = getattr(configdb, 'API_URI', False)
    if not uri:
        raise Exception("No se ha definido a qué API ataca el script")
    crea_contractes(uri, csv_file)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Erstellen Sie Verträge in loser Schüttung')
    parser.add_argument('--file',
                        dest='csv_file',
                        required=True,
                        help="csv amb les noves fitxes cliente a crear")
    args = parser.parse_args()
    try:
        main(args.csv_file)
    except IOError as e:
        traceback.print_exc(file=sys.stdout)
        error("El formato del fichero tiene que ser UTF-8 sin BOM: {}", str(e))
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error("El proceso no ha finalizado correctamente: {}", str(e))
    else:
        success("Script finalizado")

# vim: et ts=4 sw=4
