def setPlantParameters(plantname, **data):
    """Create module parameters for plant `plantname`, replacing old ones when forced."""
    # Bug fix: was a plain string, so the literal text "{plantname}" was printed.
    step(f"Setting parameters for plant '{plantname}'")
    with orm.db_session:
        plant = Plant.get(name=plantname)
        if not plant:
            error(f"Plant '{plantname}' not found")
            return
        if PlantModuleParameters.exists(plant=plant.id):
            # NOTE(review): `force` is presumably a module-level CLI flag -- confirm
            if not force:
                error(f"Plant '{plant.name}' already has parameters, use --force to overwrite")
                return
            warn("Forcing removal of previous plant parameters")
            # Bug fix: was `PlantModule[plant.id]` -- no PlantModule entity is
            # referenced anywhere else; fetch the parameters row by its plant
            # relation, mirroring the exists() check above.
            oldparams = PlantModuleParameters.get(plant=plant.id)
            out(ns(oldparams.as_dict()).dump())
            oldparams.delete()
        data = ns(data)
        params = PlantModuleParameters(
            plant=plant,
            n_modules=data.nModules,
            max_power_current_ma=int(data.Imp*1000),
            max_power_voltage_mv=int(data.Vmp*1000),
            current_temperature_coefficient_mpercent_c=int(data.temperatureCoefficientI*1000),
            voltage_temperature_coefficient_mpercent_c=int(data.temperatureCoefficientV*1000),
            standard_conditions_irradiation_w_m2=int(data.irradiationSTC),
            standard_conditions_temperature_dc=int(data.temperatureSTC*10),
            degradation_cpercent=int(data.degradation*100),
            opencircuit_voltage_mv=int(data.Voc*1000),
            shortcircuit_current_ma=int(data.Isc*1000),
            expected_power_correction_factor_cpercent=int(data.get('correctionFactorPercent', 100)*100),
        )
def checkDateLecturaPolissaB(polissa):
    """Return True (ko) when the newest draft invoice's final date does not
    match the contract's last reading date."""
    polissa = ns(polissa)
    step('Comprobando invoices en borrador de pol', polissa.id)
    invoice_ids = get_invoices_from_polissa(polissa)
    invoices = Invoice.read(invoice_ids, [
        'id',
        'type',
        'data_final',
    ])
    last_invoice = ns(invoices[0])
    step('facturas en borrador:', invoices)
    ko = False
    if last_invoice.type == 'out_invoice':
        if last_invoice.data_final != polissa.data_ultima_lectura:
            step('Factura FE o RE fecha diferente a data última lectura')
            ko = True
    if last_invoice.type == 'out_refund':
        last_not_ab = ns(get_last_invoice_not_AB(invoices))
        if last_not_ab.data_final != polissa.data_ultima_lectura:
            step('Factura AB con fecha diferente a data última lectura')
            ko = True
    return ko
def test__string_inverter_registers_merge__inverterWithStrings(self):
    """An inverter and its strings device collapse into a single merged entry."""
    plant_registers = self.alibaba_registers_inverterWithStrings()
    merged = string_inverter_registers_merge(plant_registers['Alibaba'])
    string_fields = ns([
        ('string1:intensity_mA', 100),
        ('string2:intensity_mA', 200),
    ])
    expected = [{
        'name': 'Alice',
        'type': 'inverter',
        'model': 'aros-solar',
        'register_type': 'holding_registers',
        'fields': ns(**self.inverter_registers(), **string_fields),
    }]
    self.maxDiff = None
    self.assertEqual(len(expected), 1)
    self.assertDictEqual(expected[0], merged[0])
def create_fitxa_client(O, full_name, vat, lang, email, phone, street,
        postal_code, city_id, state_id, country_id, iban, become_member=False):
    """Create (or reuse) a partner with an address and a bank account.

    Returns an ns summarizing the resulting partner, address and bank ids.
    """
    partner_id, was_created = get_or_create_partner(
        O, full_name, vat, lang, become_member)
    address_id, _ = get_or_create_partner_address(
        O, partner_id, street, postal_code, city_id, state_id,
        country_id, email, phone)
    bank_id = get_or_create_partner_bank(O, partner_id, iban, country_id)
    partner = ns(O.ResPartner.read(partner_id))
    address = ns(O.ResPartnerAddress.read(address_id))
    return ns(
        client_id=partner.id,
        existent=not was_created,
        name=partner.name,
        vat=partner.vat,
        address=address.street,
        address_id=address_id,
        bank_id=bank_id,
    )
def change_to_tg(pol_ids):
    """Mark the given contracts as non-estimable, prepending a timestamped
    audit note to their observation fields. Writes only when `doit` is set;
    returns the per-contract change dicts either way."""
    success('')
    success('Modificant polisses:')
    res = ns()
    total = len(pol_ids)
    for position, pol_id in enumerate(pol_ids, start=1):
        polissa = ns(pol_obj.read(pol_id, [
            "name",
            "no_estimable",
            "observacions",
            "observacions_estimacio",
        ]))
        step("{}/{} polissa {}".format(position, total, polissa.name))
        header = "[{}] ".format(str(datetime.today())[:19])
        if polissa.observacions:
            # py2 compat: concatenate observations as utf-8 bytes
            polissa.observacions = polissa.observacions.encode("utf-8")
        changes = {
            "observacions": header + missatge + "\n\n" + (polissa.observacions or ""),
            "observacions_estimacio": header + missatge,
            "no_estimable": True,
        }
        res[pol_id] = changes
        if doit:
            pol_obj.write(pol_id, changes)
            warn("modificat")
    return res
def test__ProductionPlant_getRegisters__oneInverter_oneString(self):
    """A plant with an inverter and a strings device reports both register sets."""
    plant = ProductionPlant()
    plant.load('test_data/modmap_testing_inverter_strings.yaml',
        self.testPlantname())
    plant.devices[0].modmap['holding_registers'].get_registers = MagicMock(
        return_value=[ns([('power', 626)])])
    plant.devices[1].modmap['holding_registers'].get_registers = MagicMock(
        return_value=[ns([('string1:intensity_mA', 100)])])
    result = plant.get_registers()
    expected = {
        'Alibaba': [
            {
                'name': 'inversor1',
                'type': 'inverter',
                'model': 'aros-solar',
                'register_type': 'holding_registers',
                'fields': [ns([('power', 626)])],
            },
            {
                'name': 'inversor1Strings',
                'type': 'inverterStrings',
                'model': 'aros-solar',
                'register_type': 'holding_registers',
                'fields': [ns([('string1:intensity_mA', 100)])],
            },
        ]
    }
    self.maxDiff = None
    self.assertDictEqual(result, expected)
def test__ProductionPlant_getRegisters__oneInverter(self):
    """A single-inverter plant yields its registers keyed by plant name."""
    plant = ProductionPlant()
    plant.load('test_data/modmap_testing_inverter.yaml',
        self.testPlantname())
    for device in plant.devices:
        for register_map in device.modmap.values():
            register_map.get_registers = MagicMock(
                return_value=[ns([('power', 626)])])
    result = plant.get_registers()
    # the result must not be empty
    self.assertTrue(result != [])
    expected = {
        'Alibaba': [{
            'name': 'inversor1',
            'type': 'inverter',
            'model': 'aros-solar',
            'register_type': 'holding_registers',
            'fields': [ns([('power', 626)])],
        }]
    }
    self.maxDiff = None
    self.assertDictEqual(result, expected)
def test_usekwh_manyAssignments_firstHaveOldInvoicing(self):
    """Only the assignment whose usable window covers the period is used."""
    tracker = UsageTrackerMockup([30])
    assignments = AssignmentsMockup([
        ns(member_id='member1', last_usable_date=isodate('2013-10-01')),
        ns(member_id='member2', last_usable_date=isodate('2015-10-01')),
    ])
    dealer = Dealer(usageTracker=tracker, assignmentProvider=assignments)
    result = dealer.use_kwh(
        contract_id=1,
        first_date=isodate('2015-08-01'),
        last_date=isodate('2015-09-01'),
        fare='2.0A',
        period='P1',
        kwh=100,
    )
    self.assertEqual(tracker.calls(), [
        ('use_kwh', 'member2', '2014-08-01', '2015-09-01', '2.0A', 'P1', 100),
    ])
    self.assertEqual(result, [
        dict(member_id='member2', kwh=30),
    ])
def run(self, parent, blocks):
    """Markdown block processor entry point.

    Extracts a custom headline block, renders it through the generator
    registered for its type, and appends the result to the element tree.
    Always returns True (the block is consumed).
    """
    block = blocks[0]
    pre, blocktype, params, post = self._extractHeadline(blocks[0])
    if pre:
        self.parser.parseChunk(parent, pre)
    blocks[0] = post
    args, kwds = self._processParams(params)
    content = self._indentedContent(blocks)
    # Remove optional closing marker if present
    if blocks:
        blocks[0] = self.RE_END.sub('', blocks[0])
    typeGenerators.update(self.config['generators'])
    generator = self._getGenerator(typeGenerators.get(blocktype, container))
    ctx = ns()
    ctx.type = blocktype
    ctx.parent = parent
    ctx.content = content
    ctx.parser = self.parser
    # document metadata is optional (only set when the meta extension ran)
    ctx.metadata = self.parser.md.Meta if hasattr(self.parser.md, "Meta") else None
    ctx.config = ns(self.config.get('config', {}))
    outargs, kwds = self._adaptParams(generator, ctx, args, kwds)
    result = generator(*outargs, **kwds)
    if result is None:
        return True
    # Idiom fix: isinstance instead of exact-type comparison; type(u'') / bytes
    # keep the Py2/Py3-compatible intent of the original checks.
    if isinstance(result, type(u'')):
        result = result.encode('utf8')
    if isinstance(result, bytes):
        result = etree.XML(result)
    parent.append(result)
    return True
def generate(ownKeyPair, values):
    """Serialize `values`, sign the serialization, and wrap payload and
    signature into a versioned protocol envelope (returned as yaml)."""
    payload = ns(values).dump()
    envelope = ns()
    envelope.intercoopVersion = protocolVersion
    envelope.payload = encode(payload)
    envelope.signature = sign(ownKeyPair, payload)
    return envelope.dump()
def modmap_registers(self):
    """Fixture: one holding-registers map with five registers and its scan window."""
    registers = ns([
        (10, '1HR0'),
        (11, '1HR'),
        (12, '2HR2'),
        (13, '3HR3'),
        (14, '4HR4'),
    ])
    scan = ns([('start', 10), ('range', 30)])
    return [
        ns([
            ('type', 'holding_registers'),
            ('registers', registers),
            ('scan', scan),
        ])
    ]
def main():
    """Run a SQL file against the configured database, binding variables
    taken from an optional YAML file and --key value CLI overrides.

    Usage: prog <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--var value ...]
    """
    options = ns()
    optarg = None
    cliargs = ns()
    keyarg = None
    args = []
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE: values starting with "(" are eval'ed -- this
            # executes arbitrary code from the command line; keep for
            # compatibility but do not expose to untrusted callers.
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    if not args:
        fail(
            "Argument required. Usage:\n"
            "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]"
            .format(sys.argv[0]))
    step("Loading {}...".format(args[0]))
    with open(args[0]) as sqlfile:
        query = sqlfile.read()
    variables = ns()
    if len(args) >= 2:
        # Bug fix: the format string had no placeholder, so the filename
        # was silently dropped from the message.
        step("Loading variables from {}...".format(args[1]))
        variables = ns.load(args[1])
        warn(variables.dump())
    variables.update(cliargs)
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query, variables)
        except KeyError as e:
            fail(
                "Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        # Bug fix: Py2-only `print` statement; print() works on both 2 and 3.
        print(dbutils.csvTable(cursor))
def test_peers(self):
    """The portal lists every peer yaml present in its directory."""
    self.write("sombogus.yaml", sombogusyaml)
    self.write("somacme.yaml", somacmeyaml)
    portal = self.setupPortal()
    expected = ns(data=[
        ns.loads(somacmeyaml),
        ns.loads(sombogusyaml),
    ])
    self.assertMultiLineEqual(ns(data=list(portal)).dump(), expected.dump())
def get_last_invoice_not_AB(invoices):
    # Return the first regular invoice ('out_invoice') in `invoices` that is
    # not matched by a preceding refund ('out_refund').
    # Each refund raises the counter; each regular invoice seen while the
    # counter is non-negative lowers it, and the invoice that drives it below
    # zero is the one returned.
    # Assumes `invoices` is ordered newest-first (callers take invoices[0] as
    # the latest one) -- TODO confirm against get_invoices_from_polissa.
    # NOTE(review): implicitly returns None when every invoice is matched;
    # callers wrap the result in ns(), which would fail on None -- confirm.
    count_AB = 0
    for elem in invoices:
        if ns(elem).type == 'out_refund':
            count_AB += 1
        if count_AB >= 0 and ns(elem).type == 'out_invoice':
            count_AB -= 1
        if count_AB < 0:
            return elem
def test_peers(self):
    """Both stored peer files show up in the portal's data dump."""
    self.write("sombogus.yaml", sombogusyaml)
    self.write("somacme.yaml", somacmeyaml)
    portal = self.setupPortal()
    actual_dump = ns(data=list(portal)).dump()
    expected_dump = ns(data=[
        ns.loads(somacmeyaml),
        ns.loads(sombogusyaml),
    ]).dump()
    self.assertMultiLineEqual(actual_dump, expected_dump)
def main():
    """Execute a SQL file with YAML/CLI-provided variables and print the
    result as CSV. Variant of the sibling runner with compact formatting."""
    options = ns()
    optarg = None
    cliargs = ns()
    keyarg = None
    args = []
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE: eval of CLI values starting with "(" runs
            # arbitrary code; kept for compatibility.
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    if not args:
        fail("Argument required. Usage:\n"
            "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]".format(sys.argv[0]))
    step("Loading {}...".format(args[0]))
    with open(args[0]) as sqlfile:
        query = sqlfile.read()
    variables = ns()
    if len(args) >= 2:
        # Bug fix: the message had no placeholder so args[1] was dropped.
        step("Loading variables from {}...".format(args[1]))
        variables = ns.load(args[1])
        warn(variables.dump())
    variables.update(cliargs)
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query, variables)
        except KeyError as e:
            fail("Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        # Bug fix: Py2-only `print` statement; print() works on 2 and 3.
        print(dbutils.csvTable(cursor))
def test__activateService_get__notFound(self):
    """GET with an unknown uuid yields a NoSuchUuid error body and a 404."""
    uuid = '01020304-0506-0708-090a-0b0c0d0e0f10'
    data = ns(uuid=uuid)
    response = self.client.get('/activateService/{}'.format(uuid))
    expected = ns(
        error='NoSuchUuid',
        message="No personal data available for uuid "
            "'01020304-0506-0708-090a-0b0c0d0e0f10'",
        arguments=[uuid],
    )
    self.assertEqual(ns.loads(response.data), expected)
    self.assertEqual(response.status_code, 404)
def main():
    """Render the draft-invoices report: run the bundled SQL file and fill
    the module-level template with the first result row."""
    options = ns()
    optarg = None
    cliargs = ns()
    keyarg = None
    args = []
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE: eval of CLI values starting with "(" runs
            # arbitrary code; kept for compatibility.
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    # Positional-argument validation intentionally disabled: the SQL file
    # is bundled next to this script rather than passed on the CLI.
    # if not args:
    #     fail("Argument required. Usage:\n"
    #         "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]".format(sys.argv[0]))
    sqlfilename = os.path.join(os.path.dirname(__file__), "draftinvoices.sql")
    step("Loading {}...".format(sqlfilename))
    with open(sqlfilename) as sqlfile:
        query = sqlfile.read()
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query)
        except KeyError as e:
            fail("Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        # Bug fix: Py2-only `print` statement; print() works on 2 and 3.
        print(template.format(**dbutils.nsList(cursor)[0]))
def main():
    """Draft-invoices report runner (formatted variant): executes the
    bundled SQL and prints the template filled with the first row."""
    options = ns()
    optarg = None
    cliargs = ns()
    keyarg = None
    args = []
    for arg in sys.argv[1:]:
        if keyarg:
            # SECURITY NOTE: eval of CLI values starting with "(" runs
            # arbitrary code; kept for compatibility.
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    # Positional-argument validation intentionally disabled: the SQL file
    # is bundled next to this script rather than passed on the CLI.
    # if not args:
    #     fail("Argument required. Usage:\n"
    #         "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]".format(sys.argv[0]))
    sqlfilename = os.path.join(os.path.dirname(__file__), "draftinvoices.sql")
    step("Loading {}...".format(sqlfilename))
    with open(sqlfilename) as sqlfile:
        query = sqlfile.read()
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query)
        except KeyError as e:
            fail(
                "Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        # Bug fix: Py2-only `print` statement; print() works on 2 and 3.
        print(template.format(**dbutils.nsList(cursor)[0]))
def exportPlants(db, skipEmpty=False):
    """Dump all municipalities (when any exist) and all plants from the ORM
    into an ns tree suitable for yaml serialization."""
    result = ns()
    with orm.db_session:
        if db.Municipality.select().exists():
            result['municipalities'] = [
                ns([('municipality', municipality.exportMunicipality())])
                for municipality in db.Municipality.select()
            ]
        result['plants'] = [
            ns([('plant', plant.exportPlant(skipEmpty))])
            for plant in db.Plant.select()
        ]
    return result
def test__create_fitxa_client__when_invalid_bankaccount(self):
    """An invalid IBAN makes create_fitxa_client raise InvalidAccount."""
    personaldata = ns(configdb.personaldata)
    personaldata.nif = '40057001V'
    personaldata.iban = 'ES3121000019830104303220'
    fitxa_client_params = self.get_create_fitxa_client_params(personaldata)
    # Bug fix: the original try/except silently passed when no exception was
    # raised; assertRaises makes the expected failure mandatory.
    with self.assertRaises(InvalidAccount) as ctx:
        with discarded_transaction(O) as t:
            create_fitxa_client(t, **fitxa_client_params)
    self.assertEquals(ctx.exception.message, 'Invalid bank account.')
def test__activateService_get__notFound(self):
    """An unknown uuid produces a 404 carrying a NoSuchUuid error."""
    uuid = '01020304-0506-0708-090a-0b0c0d0e0f10'
    data = ns(uuid=uuid)
    response = self.client.get('/activateService/{}'.format(uuid))
    body = ns.loads(response.data)
    self.assertEqual(body, ns(
        error='NoSuchUuid',
        message="No personal data available for uuid "
            "'01020304-0506-0708-090a-0b0c0d0e0f10'",
        arguments=[uuid],
    ))
    self.assertEqual(response.status_code, 404)
def test_usekwh_singleAssignment_prioritariesDoInterfere(self):
    """Usage is clipped to the assignment's last usable date."""
    tracker = UsageTrackerMockup([10])
    assignments = AssignmentsMockup([
        ns(member_id='member1', last_usable_date=isodate('2014-10-01')),
    ])
    dealer = Dealer(usageTracker=tracker, assignmentProvider=assignments)
    result = dealer.use_kwh(
        contract_id=1,
        first_date=isodate('2015-08-01'),
        last_date=isodate('2015-09-01'),
        fare='2.0A',
        period='P1',
        kwh=100,
    )
    self.assertEqual(tracker.calls(), [
        ('use_kwh', 'member1', '2014-08-01', '2014-10-01', '2.0A', 'P1', 100),
    ])
    self.assertEqual(result, [
        dict(member_id='member1', kwh=10),
    ])
def parseArgumments():
    """Build the CLI parser (listactive / init / clear) for remainders and
    return the parsed arguments as an ns."""
    import argparse
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        title="Subcommands",
        dest='subcommand',
    )
    listactive = subparsers.add_parser('listactive',
        help="list remainders",
    )
    init = subparsers.add_parser('init',
        help="initialize remainders",
    )
    clear = subparsers.add_parser('clear',
        help="clear remainders objects",
    )
    # only `init` takes options
    init.add_argument(
        '--nshares',
        dest='nshares',
        type=int,
        metavar='NSHARES',
        help="number of shares",
    )
    init.add_argument(
        '--start',
        dest='start',
        type=str,
        metavar='START',
        help="start date",
    )
    return parser.parse_args(namespace=ns())
def assertMeterListEqual(self, expected):
    # Assert that every Meter in the test database -- with its plant's
    # name, codename and description -- matches `expected` (compared as
    # ns/yaml by assertNsEqual).
    # NOTE: the orm.select argument is a Pony ORM query; the generator
    # expression is decompiled by Pony, so its exact shape matters.
    self.assertNsEqual(
        ns(data=[
            ns(
                name=name,
                plant_name=plantname,
                plant_code=plantcode,
                plant_description=description,
            )
            for name, plantname, plantcode, description in orm.select((
                meter.name,
                meter.plant.name,
                meter.plant.codename,
                meter.plant.description,
            ) for meter in self.pony.db.Meter)
        ]),
        expected)
def get(self, start, stop, filter, field, filling=None):
    """Return `field` values as an hourly array covering the local days of
    [start, stop].

    When `filling` is truthy, also return a boolean mask marking which
    hourly slots were actually filled from the database.
    Both `start` and `stop` must be timezone-aware datetimes.
    """
    assert start.tzinfo is not None, (
        "MongoTimeCurve.get called with naive (no timezone) start date")
    assert stop.tzinfo is not None, (
        "MongoTimeCurve.get called with naive (no timezone) stop date")
    ndays = (stop.date() - start.date()).days + 1
    # Bug fix: numpy.int / numpy.bool aliases were removed in numpy >= 1.24;
    # the builtin types are the documented replacements.
    data = numpy.zeros(ndays * hoursPerDay, int)
    if filling:
        filldata = numpy.zeros(ndays * hoursPerDay, bool)
    filters = dict(
        name=filter,
        datetime={
            '$gte': start,
            '$lt': addDays(stop, 1),
        },
    )
    # Ascending sort so later points overwrite earlier ones.
    # NOTE(review): the sort key 'create_at' looks like a typo for
    # 'created_at' -- confirm against the collection schema before changing.
    for x in (self.collection
            .find(filters, [field, 'datetime'])
            .sort('create_at', pymongo.ASCENDING)
            ):
        point = ns(x)
        localTime = toLocal(asUtc(point.datetime))
        timeindex = dateToCurveIndex(start, localTime)
        data[timeindex] = point.get(field)
        if filling:
            filldata[timeindex] = True
    if filling:
        return data, filldata
    return data
def test_getFields_filtering(self):
    """Requesting a single field returns only that field."""
    storage = userinfo.UserInfo(self.datadir)
    result = storage.getFields('myuser', ['nif'])
    self.assertEqual(result, ns(nif='12345678Z'))
def sorteddict(d):
    """Recursively return an ns copy of a dict/ns with keys sorted;
    non-mapping values pass through unchanged."""
    if type(d) not in (dict, ns):
        return d
    sorted_items = ((key, sorteddict(value)) for key, value in d.items())
    return ns(sorted(sorted_items))
def test_refundkwh(self):
    """Refunds are forwarded to the tracker for the given member."""
    tracker = UsageTrackerMockup([20])
    assignments = AssignmentsMockup([
        ns(member_id='member1', last_usable_date=isodate('2015-10-01')),
    ])
    dealer = Dealer(usageTracker=tracker, assignmentProvider=assignments)
    refunded = dealer.refund_kwh(
        contract_id=1,
        first_date=isodate('2015-08-01'),
        last_date=isodate('2015-09-01'),
        fare='2.0A',
        period='P1',
        kwh=100,
        member_id='member1',
    )
    self.assertEqual(tracker.calls(), [
        ('refund_kwh', 'member1', '2014-08-01', '2015-09-01', '2.0A', 'P1', 100),
    ])
    self.assertEqual(refunded, 20)
def search_all_to_tg():
    """Search contracts that are candidates to become non-estimable and
    report summary counters (detailed lists only when `query` is set)."""
    res = ns({
        'candidates': [],
        'esborranys_no_ultimalectura': [],
        'activa_no_ultimalectura': [],
    })
    res.candidates = pol_obj.search([
        ('no_estimable', '=', False),  # still estimable
    ])
    success('')
    success("Candidats a passar a no estimable ........... {}", len(res.candidates))
    success("Candidats: esborranys sense ultima lectura .. {}", len(res.esborranys_no_ultimalectura))
    success("Candidats: activa sense ultima lectura ...... {}", len(res.activa_no_ultimalectura))
    if query:
        success("")
        success("Candidats: {}", res.candidates)
        success("Candidats: esborranys sense ultima lectura . {}", res.esborranys_no_ultimalectura)
        success("Candidats: activa sense ultima lectura ..... {}", res.activa_no_ultimalectura)
    return res
def alibaba_registers_inverterWithStrings(self):
    """Fixture: an inverter plus its strings device under the 'Alibaba' plant."""
    inverter_device = {
        'name': 'Alice',
        'type': 'inverter',
        'model': 'aros-solar',
        'register_type': 'holding_registers',
        'fields': self.inverter_registers(),
    }
    strings_device = {
        'name': 'AliceStrings',
        'type': 'inverterStrings',
        'model': 'aros-solar',
        'register_type': 'holding_registers',
        'fields': ns([
            ('string1:intensity_mA', 100),
            ('string2:intensity_mA', 200),
        ]),
    }
    return {'Alibaba': [inverter_device, strings_device]}
def test_parse_missingPeerField(self):
    """A payload without originpeer fails parsing with MissingField."""
    values = ns(self.values)
    del values.originpeer
    message = self.setupMessage(values=values)
    self.assertParseRaises(
        message,
        packaging.MissingField,
        "Required field 'originpeer' missing on the payload",
    )
def get_cups_address(O, cups):
    """Resolve the address of an active CUPS, enriching it with municipality,
    state and country ids. Raises a generic Exception when not found."""
    try:
        cups_ids = O.GiscedataCupsPs.search([
            ('name', 'ilike', cups),
            ('active', '=', True),
        ])
        # IndexError here means no matching CUPS
        data = O.GiscedataCupsPs.read(
            cups_ids[0], ['direccio', 'dp', 'id_municipi'])
    except IndexError:
        raise Exception("Cups {} not found".format(cups))
    # flatten the (id, name) pairs returned by the ERP into plain ids
    id_municipi = data['id_municipi'][0]
    data['id_municipi'] = id_municipi
    id_state = O.ResMunicipi.read(id_municipi, ['state'])['state'][0]
    data['id_state'] = id_state
    id_country = O.ResCountryState.read(id_state, ['country_id'])['country_id'][0]
    data['id_country'] = id_country
    data['street'] = data.pop('direccio')
    del data['id']
    return ns(data)
def test_parse_unrecognizedPeer(self):
    """An unknown origin peer fails parsing with BadPeer."""
    values = ns(self.values, originpeer='badpeer')
    message = self.setupMessage(values=values)
    self.assertParseRaises(
        message, packaging.BadPeer,
        "The entity 'badpeer' is not a recognized one")
def test_parse_missingPeerField(self):
    """Dropping originpeer from the payload triggers MissingField on parse."""
    values = ns(self.values)
    del values.originpeer
    message = self.setupMessage(values=values)
    expected_message = "Required field 'originpeer' missing on the payload"
    self.assertParseRaises(message, packaging.MissingField, expected_message)
def __test_PublishOrmIfPlantNotExist(self):
    # Disabled test (the double-underscore prefix keeps the runner from
    # collecting it): storing inverter measures under an unknown plant name
    # should leave the inverter readings empty.
    inverter_name = 'Alice'
    plant_name = 'SomEnergia_Alibaba'
    with orm.db_session:
        alcolea = self.pony.db.Plant(
            name=plant_name,
            codename='SOMSC01',
            description='descripción de planta')
        inverter = self.pony.db.Inverter(name=inverter_name, plant=alcolea)
    # Raw register snapshot as an ordered ns, mimicking an inverter read.
    metrics = ns([
        ('daily_energy_h_wh', 0),
        ('daily_energy_l_wh', 17556),
        ('e_total_h_wh', 566),
        ('e_total_l_wh', 49213),
        ('h_total_h_h', 0),
        ('h_total_l_h', 18827),
        ('pac_r_w', 0),
        ('pac_s_w', 0),
        ('pac_t_w', 0),
        ('powerreactive_t_v', 0),
        ('powerreactive_r_v', 0),
        ('powerreactive_s_v', 0),
        ('temp_inv_dc', 320),
        ('time', datetime.datetime.now(datetime.timezone.utc)),
        # Sensors registers obtained from inverters
        ('probe1value', 443),
        ('probe2value', 220),
        ('probe3value', 0),
        ('probe4value', 0),
    ])
    # 'UnknownPlant' does not match the plant created above on purpose.
    self.storage.storeInverterMeasures('UnknownPlant', inverter_name, metrics)
    self.assertListEqual(self.storage.inverterReadings(), [])
def __init__(self, **kwds):
    """Keep configuration, defer client/session creation, and seed the
    last-dates file with an empty mapping when it does not exist yet."""
    self._config = ns(kwds)
    self._client = None
    self._session = None
    last_dates_path = Path(self._config.lastDateFile)
    if not last_dates_path.exists():
        # empty yaml/json mapping so later reads always succeed
        last_dates_path.write_text("{}")
def translate(self, data):
    """Translate an ns of per-language texts: pick the first preferred
    language, then the fallback; recurse into plain containers; pass
    scalars through unchanged."""
    if type(data) == ns:
        # preferred languages, in order
        for lang in self.language:
            if lang in data:
                return data[lang]
        # configured fallback language
        if self.fallback in data:
            return data[self.fallback]
        # not a translation mapping: translate each value instead
        return ns(
            (key, self.__call__(value))
            for key, value in data.items()
        )
    if type(data) == list:
        return [self.__call__(item) for item in data]
    return data
def __getattr__(self, name):
    """Lazily extract report data for `name`, memoizing the result.

    The extracted data is round-tripped through yaml (`dump`/`loads`) so the
    cached copy is detached from whatever structures the extractor returned.
    """
    # Idiom fix: membership test on the dict itself, not on .keys()
    if name not in self.cache:
        extractor = self.factory_data_extractor(name)
        data = extractor.get_data(
            self.cursor, self.uid, self.object, self.extra_text, self.context)
        self.cache[name] = ns.loads(ns(data).dump())
    return self.cache[name]
def handle(e, status_code):
    """Build a yaml error response (name, message, optional arguments) from
    an exception and an HTTP status code."""
    body = ns(
        error=type(e).__name__,
        message=format(e),
        arguments=e.arguments if hasattr(e, 'arguments') else [],
    ).dump()
    response = make_response(body)
    response.mimetype = 'application/yaml'
    response.status_code = status_code
    return response
def parseArguments():
    """Minimal argument handling: with exactly `prog <key> <value>` the pair
    is stored in the returned ns; otherwise the ns stays empty."""
    # TODO parse arguments into a ns
    args = ns()
    if len(sys.argv) == 3:
        args[sys.argv[1]] = sys.argv[2]
    return args
def assertMeterRegistryEqual(self, plantCode, meterName, expected):
    # Assert that all registry rows stored for the given plant/meter pair
    # match `expected` (compared as ns/yaml by assertNsEqual).
    plant = self.pony.db.Plant.get(codename=plantCode)
    meter = self.pony.db.Meter.get(plant=plant, name=meterName)
    registry = [
        ns(
            # yamlns reads datetimes just as date, compare the string
            time=str(line.time),
            export_energy_wh=line.export_energy_wh,
            import_energy_wh=line.import_energy_wh,
            r1_VArh=line.r1_VArh,
            r2_VArh=line.r2_VArh,
            r3_VArh=line.r3_VArh,
            r4_VArh=line.r4_VArh,
        )
        # Pony ORM query: the generator expression is decompiled by Pony,
        # so keep its exact shape.
        for line in orm.select(
            l for l in self.pony.db.MeterRegistry if l.meter == meter)
    ]
    self.assertNsEqual(ns(registry=registry), expected)
def test_getFields_filtering(self):
    """getFields with a one-item field list filters the record down to it."""
    store = userinfo.UserInfo(self.datadir)
    fetched = store.getFields('myuser', ['nif'])
    expected = ns(nif='12345678Z')
    self.assertEqual(fetched, expected)
def handle(e, status_code):
    """Turn an exception into a yaml HTTP error response."""
    error_body = ns()
    error_body.error = type(e).__name__
    error_body.message = format(e)
    # exceptions without an `arguments` attribute report an empty list
    error_body.arguments = e.arguments if hasattr(e, 'arguments') else []
    response = make_response(error_body.dump())
    response.mimetype = 'application/yaml'
    response.status_code = status_code
    return response
def parseArgumments():
    """CLI for rights-per-share maintenance (listactive / init / clear);
    returns the parsed arguments as an ns."""
    import argparse
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        title="Subcommands",
        dest='subcommand',
    )
    listactive = subparsers.add_parser('listactive',
        help="list rights per share",
    )
    init = subparsers.add_parser('init',
        help="init rights per share",
    )
    clear = subparsers.add_parser('clear',
        help="clear rights per share",
    )
    # shared required options for init and listactive
    for sub in (init, listactive):
        sub.add_argument(
            '--nshares',
            dest='nshares',
            type=sequence,
            metavar='NSHARES',
            required=True,
            help="number of shares",
        )
        sub.add_argument(
            '--start',
            dest='start',
            type=str,
            metavar='START',
            required=True,
            help="start date",
        )
    listactive.add_argument(
        '--end',
        dest='end',
        type=str,
        metavar='END',
        help="end date",
    )
    init.add_argument(
        '--ndays',
        dest='ndays',
        type=int,
        metavar='NDAYS',
        help="number of days",
    )
    init.add_argument(
        dest='rights',
        nargs='+',
        help="rights",
    )
    return parser.parse_args(namespace=ns())
def test__activateService_get(self):
    """Stored personal data is retrievable by its uuid with a 200."""
    values = ns.loads(self.yaml)
    uuid = self.storage.store(values)
    data = ns(uuid=uuid)
    response = self.client.get('/activateService/{}'.format(uuid))
    self.assertEqual(ns.loads(response.data), values)
    self.assertEqual(response.status_code, 200)
def __init__(self, effectiveInvestments):
    """Build contract records (member, effective date span, shares) from
    raw (member, start, end, shares) tuples."""
    self._contracts = []
    for member, start, end, shares in effectiveInvestments:
        self._contracts.append(ns(
            member=member,
            firstEffectiveDate=isodate(start),
            lastEffectiveDate=isodate(end),
            shares=shares,
        ))
def store(self, data={}, **kwds):
    """Persist `data` (plus keyword overrides) under a freshly generated
    token and return that token."""
    token = crypto.uuid()
    filename = self._tokenfile(token)
    payload = ns(data)
    payload.update(**kwds)
    # TODO: use dump with filename when yamlns fixes Py2 issues
    with open(filename, 'wb') as f:
        f.write(payload.dump().encode('utf-8'))
    return token
def setupMessage(self,
        values=None,
        yaml=None,
        signedyaml=None,
        payload=None,
        removedFromMessage=None,
        version=None,
        ):
    """Compose a signed protocol message, letting each part be overridden
    to produce deliberately malformed messages for tests.

    `removedFromMessage` lists envelope fields to delete before dumping.
    """
    # Bug fix: mutable default argument ([]) replaced with a None sentinel.
    removedFromMessage = removedFromMessage if removedFromMessage is not None else []
    values = ns(values or self.values)
    yaml = yaml or values.dump()
    messageValues = ns(
        intercoopVersion=version or packaging.protocolVersion,
        # sign the explicitly provided yaml when testing bad signatures
        signature=crypto.sign(self.key, signedyaml or yaml),
        payload=payload or crypto.encode(yaml),
    )
    for field in removedFromMessage:
        del messageValues[field]
    return messageValues.dump()
def _firstLastDate(self, name, first=False):
    """returns the date of the first or last item of a given name"""
    order = pymongo.ASCENDING if first else pymongo.DESCENDING
    cursor = (self.collection
        .find(dict(name=name))
        .sort('datetime', order)
        .limit(1))
    for point in cursor:
        local = toLocal(asUtc(ns(point).datetime))
        # truncate to the start of that local day
        return local.replace(hour=0, minute=0, second=0)
    return None
def fetchNs(cursor):
    """
    Wraps a database cursor so that instead of providing data as arrays,
    it provides objects with attributes named as the query column names.
    """
    fields = [column.name for column in cursor.description]
    for row in cursor:
        yield ns(zip(fields, row))
    # Bug fix: the original ended with `raise StopIteration`, which PEP 479
    # turns into a RuntimeError inside generators on Python 3.7+; simply
    # falling off the end terminates the generator correctly.
def test_activateService_missingField(self):
    """A 403 MissingField reply from the peer surfaces as packaging.MissingField."""
    error = ns(
        error='MissingField',
        message="Required field 'originpeer' missing on the payload",
        arguments=['originpeer'],
    )
    data = ns(self.personalData)
    del data.originpeer
    with self.respondToPost(403, error.dump()) as m:
        with self.assertRaises(packaging.MissingField) as ctx:
            url = self.client.activateService(
                service=self.service,
                personalData=self.personalData,
            )
    self.assertEqual(
        format(ctx.exception),
        "Required field 'originpeer' missing on the payload",
    )
def test_isactive_manyAssigments_oneWithoutInvestment(self):
    """The contract is active even when only one member has an investment."""
    tracker = UsageTrackerMockup([])
    assignments = AssignmentsMockup([
        ns(member_id='member1', last_usable_date=isodate('2015-10-01')),
        ns(member_id='member2', last_usable_date=isodate('2015-10-01')),
    ])
    investments = InvestmentMockup(['member1'])
    dealer = Dealer(
        usageTracker=tracker,
        assignmentProvider=assignments,
        investments=investments,
    )
    self.assertTrue(dealer.is_active(
        contract_id=1,
        first_date=isodate('2015-10-01'),
        last_date=isodate('2015-10-01'),
    ))
def parseArgumments():
    """CLI with a single `clear` subcommand; returns parsed args as an ns."""
    import argparse
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        title="Subcommands",
        dest='subcommand',
    )
    subparsers.add_parser('clear',
        help="clear usage",
    )
    return parser.parse_args(namespace=ns())
def getFields(self, user, fields):
    """Return the requested fields of a user's record, raising BadField on
    the first name not present in the record."""
    userdata = self._userData(user)
    for field in fields:
        if field not in userdata:
            raise BadField(field)
    requested = [
        (key, value)
        for key, value in userdata.items()
        if key in fields
    ]
    return ns(requested)
def respondToPost(self, status, text=None, mimetype='application/yaml'):
    """Mock the activateService POST endpoint with the given status, body
    and mimetype; defaults to a continuationUrl yaml body."""
    text = text or ns(
        continuationUrl=self.continuationUrl,
    ).dump()
    mock = requests_mock.mock()
    mock.post(
        self.apiurl + '/activateService',
        status_code=status,
        text=text,
        headers={'content-type': mimetype},
    )
    return mock
def test__activateService_post__missingField(self):
    """POSTing a payload without originpeer yields a 400 MissingField error."""
    data = ns.loads(self.yaml)
    del data.originpeer
    package = packaging.generate(self.key, data)
    response = self.client.post('/activateService', data=package)
    expected = ns(
        error='MissingField',
        message="Required field 'originpeer' missing on the payload",
        arguments=['originpeer'],
    )
    self.assertEqual(ns.loads(response.data), expected)
    self.assertEqual(response.status_code, 400)
def getFields(self, user, fields=None):
    # When specific fields are requested, validate them against
    # supportedFields, fetch the full record (recursive call with
    # fields=None falls through to the ERP lookup below) and filter it.
    if fields is not None:
        for field in fields:
            if field not in self.supportedFields:
                raise BadField(field)
        return ns((
            (key, value)
            for key, value in self.getFields(user).items()
            if key in fields
        ))
    # Full record: look the user up in the ERP by VAT ('ES' + nif).
    O = Client(**self.config)
    ids = O.ResPartner.search([
        ('vat', '=', 'ES' + user),
    ])
    if not ids:
        raise BadUser(user)
    partner_id = ids[0]
    partner = O.ResPartner.browse(partner_id)
    # NOTE(review): address_id is computed but never used below -- confirm
    # whether the address lookup is still needed.
    address_id = O.ResPartnerAddress.search([
        ('partner_id', '=', partner_id),
    ])
    return ns(
        originpeer='somenergia',
        nif=user,
        lang=partner.lang[:2],
        name=partner.name,
        innerid=partner.www_soci,
        # '---------' marks a partner that is not a member
        peerroles=['member'] if partner.www_soci != '---------' else [],
        address=partner.www_street,
        city=partner.www_municipi.ine,
        state=partner.www_provincia.code,
        postalcode=partner.www_zip,
        country='ES',  # TODO: Take it from the ERP
        email=partner.www_email,
        phone=partner.www_phone or partner.www_mobile,
        proxynif='TODO',
        proxyname='TODO',
    )