def legacyInfluxUpload():
    import time  # moved out of the loop body
    plantInflux = ProductionPlant()
    if not plantInflux.load('conf/modmapLegacyInflux.yaml', 'Alcolea'):
        logger.error('Error loading yaml definition file...')
        sys.exit(-1)
    data = ns.load('conf/modmap.yaml')
    for plant_data in data.plantmonitor:
        influxdb = plant_data.influx
        plantInfluxRegisters = plantInflux.get_registers()
        fluxStorage = InfluxMetricStorage(influxdb)
        for i, device in enumerate(plantInflux.devices):
            inverter_name = device.name
            inverter_registers = plantInfluxRegisters[i]['Alcolea'][0]['fields']
            logger.info("**** Saving data in database ****")
            logger.info("**** Metrics - tag - %s ****" % inverter_name)
            logger.info("**** Metrics - tag - location %s ****" % plantInflux.name)
            logger.info("**** Metrics - fields - %s ****" % inverter_registers)
            logger.info("**** Log to flux ****")
            fluxStorage.storeInverterMeasures(
                plantInflux.name, inverter_name, inverter_registers)
        logger.info("Sleeping 5 secs")
        time.sleep(5)
    logger.info("Done influx upload")

def uploadProduction(self, facility, data):
    response = self._client.service.setObservation(dict(
        header=self._session.header,
        facilityId=facility,
        variableId='prod',
        measurementType='MEAN',
        measurementTime=60,  # minutes
        unit='W',
        observationData=dict(item=[
            dict(
                startTime=startTime,
                data=value,
            )
            for startTime, value in data
        ]),
    ))
    if self._showResponses():
        print("joete", response)
    if response.errorCode != "OK":
        if response.errorCode == "INVALID_FACILITY_ID":
            raise MeteologicaApiError("{}: {}".format(response.errorCode, facility))
        else:
            raise MeteologicaApiError(response.errorCode)
    # TODO: session renewal not tested yet
    self._session.header['sessionToken'] = response.header['sessionToken']
    lastDateOfCurrentBatch = max(date for date, measure in data)
    lastDates = ns.load(self._config.lastDateFile)
    lastDates[facility] = max(lastDates.get(facility, ''), str(lastDateOfCurrentBatch))
    lastDates.dump(self._config.lastDateFile)
    return response.errorCode

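# A minimal usage sketch for uploadProduction above. `api` stands for an
# already-constructed client with an open session (construction not shown
# here); the facility id and the (startTime, value) pairs are illustrative
# placeholders, not real data.
def _example_uploadProduction(api):
    api.uploadProduction('SomEnergia_Alcolea', [
        ('2020-04-01 00:00:00', 10),  # (startTime, mean production in W)
        ('2020-04-01 01:00:00', 20),
    ])
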
def setUp():
    configdb = ns.load('../conf/config.yaml')
    # PostgreSQL connection
    with psycopg2.connect(user=configdb['psql_user'],
                          password=configdb['psql_password'],
                          host=configdb['psql_host'],
                          port=configdb['psql_port'],
                          database=configdb['psql_db']) as conn:
        with conn.cursor() as cur:
            cur.execute(
                "CREATE TABLE IF NOT EXISTS forecastHead("
                "id SERIAL NOT NULL, errorCode VARCHAR(50), facilityId VARCHAR(50), "
                "variableId VARCHAR(50), predictorId VARCHAR(20), "
                "forecastDate TIMESTAMPTZ, granularity INTEGER, PRIMARY KEY(id));")
            cur.execute(
                "CREATE TABLE forecastData("
                "idForecastHead SERIAL REFERENCES forecastHead(id), time TIMESTAMPTZ, "
                "percentil10 INTEGER, percentil50 INTEGER, percentil90 INTEGER, "
                "PRIMARY KEY(idForecastHead, time));")
            cur.execute("SELECT create_hypertable('forecastData', 'time');")
            cur.execute("""
                CREATE TABLE facility_meter (
                    id serial primary key,
                    facilityid character varying(200),
                    meter character varying(200)
                );
            """)

def main():
    options = ns()
    optarg = None
    cliargs = ns()
    keyarg = None
    args = []
    for arg in sys.argv[1:]:
        if keyarg:
            # Tuple-looking values are eval'ed, anything else is kept as a string
            cliargs[keyarg] = eval(arg) if arg.startswith("(") else arg
            keyarg = None
            continue
        if optarg:
            options[optarg] = arg
            optarg = None
            continue
        if arg.startswith('--'):
            keyarg = arg[2:]
            continue
        if arg.startswith('-'):
            optarg = arg[1:]
            continue
        args.append(arg)
    if not args:
        fail(
            "Argument required. Usage:\n"
            "{} <sqlfile> [-C <dbconfig.py>] [<yamlfile>] [--<var1> <value1> [--<var2> <value2> ..] ]"
            .format(sys.argv[0]))
    step("Loading {}...".format(args[0]))
    with open(args[0]) as sqlfile:
        query = sqlfile.read()
    variables = ns()
    if len(args) >= 2:
        step("Loading variables from {}...".format(args[1]))
        variables = ns.load(args[1])
        warn(variables.dump())
    variables.update(cliargs)
    if 'C' in options:
        import imp
        config = imp.load_source('config', options.C)
    else:
        import config
    step("Connecting to the database...")
    db = psycopg2.connect(**config.psycopg)
    with db.cursor() as cursor:
        try:
            cursor.execute(query, variables)
        except KeyError as e:
            fail(
                "Missing variable '{key}'. Specify it in the YAML file or by using the --{key} option"
                .format(key=e.args[0]))
        print(dbutils.csvTable(cursor))

def main():
    args = parseArguments()
    configfile = args.get('--config', 'conf/config_meteologica.yaml')
    configdb = ns.load(configfile)
    configdb.update(args)
    upload_meter_data(configdb)

def get(self, url):
    cachefile = self._url2path(url)
    if cachefile.exists():
        info = ns.load(str(cachefile))
        return self._namespace2response(info)
    response = requests.get(url)
    if response.ok:
        self._response2namespace(response).dump(self._url2path(url))
    return response

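# Sketch of the caching behaviour of get() above, assuming a Fetcher built
# with a cache directory as in the tests further down: the first call hits
# the network and dumps the response as YAML; the second is rebuilt from
# that file without any request. The URL is an illustrative placeholder.
def _example_cached_get():
    f = Fetcher(cache='cachedir')
    first = f.get('http://example.com')   # network hit, dumped if response.ok
    second = f.get('http://example.com')  # served from the cached namespace
    assert first.status_code == second.status_code
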
def task_daily_download_from_api_meteologica(test_env=True):
    pony = PonyManager(envinfo.DB_CONF)
    pony.define_all_models()
    pony.binddb(create_tables=False)
    configdb = ns.load('conf/config_meteologica.yaml')
    with orm.db_session:
        downloadMeterForecasts(pony.db, configdb, test_env=test_env)

def get(self, peername):
    import re
    if not re.match(r'^\w+$', peername):
        raise Exception("Invalid peer '{}'".format(peername))
    filename = os.path.join(self.datadir, peername + '.yaml')
    try:
        return ns.load(filename)
    except IOError:
        raise Exception("No such peer '{}'".format(peername))

def clearDb():
    configdb = ns.load('../conf/config.yaml')
    with psycopg2.connect(user=configdb['psql_user'],
                          password=configdb['psql_password'],
                          host=configdb['psql_host'],
                          port=configdb['psql_port'],
                          database=configdb['psql_db']) as conn:
        with conn.cursor() as cur:
            cur.execute("DELETE FROM forecastData;")
            cur.execute("DELETE FROM forecastHead;")

def load(self, yamlFile, plant_name):
    data = ns.load(yamlFile)
    for plant_data in data.plantmonitor:
        if plant_data.enabled and plant_data.name == plant_name:
            self.name = plant_data.name
            self.description = plant_data.description
            for device_data in plant_data.devices:
                new_device = ProductionDevice()
                if new_device.load(device_data):
                    self.devices.append(new_device)
            return True
    return False  # plant not found or not enabled

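# Minimal sketch of the load() method above, using the YAML file and plant
# name seen in legacyInfluxUpload; a truthy return marks a successful load.
def _example_plant_load():
    plant = ProductionPlant()
    if plant.load('conf/modmapLegacyInflux.yaml', 'Alcolea'):
        for device in plant.devices:
            print(device.name)
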
def tearDown():
    # PostgreSQL connection
    configdb = ns.load('../conf/config.yaml')
    with psycopg2.connect(user=configdb['psql_user'],
                          password=configdb['psql_password'],
                          host=configdb['psql_host'],
                          port=configdb['psql_port'],
                          database=configdb['psql_db']) as conn:
        with conn.cursor() as cur:
            # cur.execute("DROP TABLE forecastData;")
            # cur.execute("DROP TABLE forecastHead;")
            cur.execute("DROP TABLE facility_meter;")

def test_load_fromFile(self):
    data = u"hi: caña\n"
    import codecs
    with codecs.open("test.yaml", 'w', encoding='utf-8') as f:
        f.write(data)
    try:
        result = namespace.load("test.yaml")
        self.assertEqual(result, namespace(hi=u'caña'))
    finally:
        import os
        os.unlink("test.yaml")

def main():
    args = parseArguments()
    configfile = args.get('--config', 'conf/config.yaml')
    configdb = ns.load(configfile)
    configdb.update(args)
    from conf import envinfo
    pony = PonyManager(envinfo.DB_CONF)
    pony.define_all_models()
    pony.binddb()
    downloadMeterForecasts(pony.db, configdb)

def format_yaml(filename=None):
    if not filename:
        sys.exit("No file given to create contracts from")
    data = ns.load(filename)
    data = data['post']
    dataformated = [(key, data[key]) for key in data.keys()]
    print(dataformated)
    status, reason, text = add_member(dataformated)
    print('Status: ' + str(status))
    print('Reason: ' + str(reason))
    print('Text: ' + str(text))

def main():
    args = parseArguments()
    configfile = args.get('--config', 'conf/config_meteologica.yaml')
    configdb = ns.load(configfile)
    configdb.update(args)
    PlantmonitorDB(configdb).demoDBsetup(configdb)
    with PlantmonitorDB(configdb) as db:
        alcolea = "SomEnergia_Alcolea"
        fontivsolar = "SomEnergia_Fontivsolar"
        perpinya = "SomEnergia_Perpinya"

        db.addFacilityMeterRelation(alcolea, '123401234')
        rows = {alcolea: normReadings(dt.datetime(2020, 4, 1, 0), dt.datetime(2020, 6, 1, 0))}
        db.addMeterData(rows)
        logger.debug('Inserted {} entries for {}'.format(len(rows[alcolea]), alcolea))

        db.addFacilityMeterRelation(fontivsolar, '567805678')
        rows = {fontivsolar: normReadings(dt.datetime(2020, 3, 20, 0), dt.datetime(2020, 4, 26, 0))}
        db.addMeterData(rows)
        logger.debug('Inserted {} entries for {}'.format(len(rows[fontivsolar]), fontivsolar))

        db.addFacilityMeterRelation(perpinya, '909009090')
        rows = {perpinya: normReadings(dt.datetime(2020, 4, 11, 0), dt.datetime(2020, 5, 27, 0))}
        db.addMeterData(rows)
        logger.debug('Inserted {} entries for {}'.format(len(rows[perpinya]), perpinya))

def setUp(self):
    self.maxDiff = None
    self.flux_config = flux_config = ns.load('conf/modmap.yaml').plantmonitor[0].influx
    flux_config.influxdb_database = 'unittesting'
    self.flux_client = client_db(
        flux_config['influxdb_ip'],
        flux_config['influxdb_port'],
        flux_config['influxdb_user'],
        flux_config['influxdb_password'],
        flux_config['influxdb_database'],
        ssl=flux_config['influxdb_ssl'],
        verify_ssl=flux_config['influxdb_verify_ssl'],
    )
    self.flux_client.drop_database(self.flux_config.influxdb_database)
    self.flux_client.create_database(flux_config.influxdb_database)

def test_get_unicode(self):
    f = Fetcher(cache=self.cachedir)
    responses.add(
        method='GET',
        url='http://google.com',
        status=200,
        body=u"La caña",
    )
    response = f.get('http://google.com')
    cacheFile = f._url2path('http://google.com')
    cached = ns.load(str(cacheFile))
    self.assertNsEqual(cached, """\
url: http://google.com/
status_code: 200
headers:
  Content-Type: text/plain; charset=utf-8
text: La caña
encoding: utf-8
""")
    self.assertEqual(len(responses.calls), 1)

def lastDateUploaded(self, facility):
    lastDates = ns.load(self._config.lastDateFile)
    lastDate = lastDates.get(facility, None)
    return todt(lastDate)

def __iter__(self):
    wildcard = os.path.join(self.datadir, '*.yaml')
    return (ns.load(filename) for filename in sorted(glob.glob(wildcard)))

def _userData(self, user):
    try:
        return ns.load(os.path.join(self.datadir, user + '.yaml'))
    except Exception:
        raise BadUser(user)

        p['name'], potencia_activa, potencia)  # truncated: tail of the preceding call


def get_dades_from_csv():
    with open('dades_id_polissa_potencia_dist.csv') as csv_file:
        data = []
        for row in csv_file:
            data.append(row.rstrip().split('\t'))
        return data


def main():
    step("Get all potenciadist from {} drive", config.document['filename'])
    get_id_polissa_potencia_dist_from_drive()
    data = get_dades_from_csv()
    update_dades_erp(data)


if __name__ == '__main__':
    try:
        config = ns.load("configdoc.yaml")
    except Exception:
        error("Check configdoc.yaml")
        raise
    main()

#!/usr/bin/env python
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from yamlns import namespace as ns
from consolemsg import step, error
import os
import sys


def local(filename):
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)


step("Loading schema")
schema = ns.load(local("../peerdescriptor-schema.yaml"))

for yamlfile in sys.argv[1:]:
    step("Validating {}", yamlfile)
    try:
        validate(ns.load(yamlfile), schema)
    except ValidationError as e:
        error(
            "Validation error at {filename}#/{path}:\n"
            "{msg}",
            filename=yamlfile,
            path='/'.join(format(x) for x in e.path),
            msg=e.message,
        )

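# Example invocation of the validator script above; the script and peer
# file names are illustrative placeholders:
#
#     python validatepeers.py peers/somenergia.yaml peers/other.yaml
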
def init(filename):
    if filename:
        aggr = setupAggregator(ns.load(filename))
        aggr_obj.update_kwh(aggr['id'])

def migrateLegacyToPony(configdbns, excerpt=False, skipList=[]):
    plantName = 'Alcolea'
    if 'plants' not in skipList:
        logger.info("Create Plants")
        createPlants()
    if 'inverters' not in skipList:
        logger.info("Migrate Inverters")
        migrateLegacyInverterTableToPony(configdbns, excerpt)
    if 'meters' not in skipList:
        logger.info("Migrate Meters")
        migrateLegacyMeterTableToPony(configdbns, excerpt)
    if 'sensors' not in skipList:
        logger.info("Migrate SensorIrradiation")
        migrateLegacySensorIrradiationTableToPony(
            configdbns,
            plantName=plantName,
            deviceName='irradiation_alcolea',
            excerpt=excerpt,
        )
    logger.info("Migration complete")


if __name__ == "__main__":
    configdbns = ns.load('conf/configlegacydb.yaml')
    migrateLegacyToPony(configdbns)

def legacyMigrate(skipList=[]):
    configdbns = ns.load('conf/configlegacydb.yaml')
    migrateLegacyToPony(configdbns, skipList=skipList)

def forecast():
    configdb = ns.load('../conf/config.yaml')
    timeDelta = configdb['time_delta']
    username = configdb['psql_user']

    # Meteologica API connection
    client = Client(configdb['meteo_test_url'])
    meteoCredentials = {
        'username': configdb['meteo_user'],
        'password': configdb['meteo_password'],
    }
    loginResult = client.service.login(meteoCredentials)
    if loginResult.errorCode != 'OK':
        print('Connection failed with error code {}'.format(loginResult.errorCode))
        sys.exit(-1)

    keepAlive = loginResult
    keepAlive = client.service.keepAlive(keepAlive)
    request = keepAlive
    head = keepAlive['header']
    # client.service.getForecast({'header': header, 'facilityId': 'SomEnergia_Alcolea',
    #     'variableId': 'prod', 'predictorId': 'aggregated'})

    facilitiesResponse = client.service.getAllFacilities(request)

    variableId = 'prod'
    predictorId = 'aggregated'
    granularity = '60'
    utcnow = datetime.now(timezone.utc)  # requires `from datetime import datetime, timezone`
    forecastDate = '2020-05-05T00:00:00.0000+02:00'  # addtz(utcnow)
    fromDate = forecastDate
    utcthen = utcnow + timedelta(days=30)
    toDate = '2020-04-19T00:00:00.0000+02:00'  # addtz(utcthen)

    # PostgreSQL connection
    start = time.perf_counter()
    with psycopg2.connect(user=configdb['psql_user'],
                          password=configdb['psql_password'],
                          host=configdb['psql_host'],
                          port=configdb['psql_port'],
                          database=configdb['psql_db']) as conn:
        with conn.cursor() as cur:
            for facilityItem in facilitiesResponse['facilityItems']['item']:
                print("Processing facility {}".format(facilityItem['facilityName']))
                ministart = time.perf_counter()
                facilityId = facilityItem['facilityId']
                forecastRequest = {
                    'header': head,
                    'facilityId': facilityId,
                    'variableId': variableId,
                    'predictorId': predictorId,
                    'granularity': granularity,
                    'forecastDate': forecastDate,
                    'fromDate': fromDate,
                    'toDate': toDate,
                }
                forecast = client.service.getForecast(forecastRequest)
                errorCode = forecast['errorCode']
                forecastData = forecast['forecastData']
                # Parametrized to avoid quoting issues and SQL injection
                cur.execute(
                    "INSERT INTO forecastHead(errorCode, facilityId, variableId,"
                    " predictorId, forecastDate, granularity)"
                    " VALUES (%s, %s, %s, %s, %s, %s) RETURNING id;",
                    (errorCode, facilityId, variableId, predictorId,
                     forecastDate, granularity))
                currentIdForecastHead = cur.fetchone()[0]
                if errorCode == 'OK':
                    # First entry is empty, probably slicing is faster than filtering
                    forecastDataDict = [
                        entry.split('~')
                        for entry in forecastData.split(':')
                        if entry
                    ]
                    realFromDate = unixToISOtz(forecastDataDict[0][0])
                    realToDate = unixToISOtz(forecastDataDict[-1][0])
                    cur.execute(
                        "DELETE FROM forecastdata USING forecasthead"
                        " WHERE forecastdata.idforecasthead = forecasthead.id"
                        " AND forecasthead.facilityId = %s"
                        " AND time BETWEEN %s AND %s",
                        (facilityId, realFromDate, realToDate))
                    # https://hakibenita.com/fast-load-data-python-postgresql
                    psycopg2.extras.execute_values(
                        cur,
                        "INSERT INTO forecastData VALUES %s;",
                        ((
                            currentIdForecastHead,
                            unixToISOtz(record[0]),
                            record[1],
                            record[2],
                            record[3],
                        ) for record in forecastDataDict),
                        page_size=1000)
                elapsed = time.perf_counter() - ministart
                print("\t{} ({}) {:.4} s".format(
                    facilityItem['facilityName'], facilityId, elapsed))
    elapsed = time.perf_counter() - start
    print('Total elapsed time {:0.4}'.format(elapsed))

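# Sketch of the forecastData wire format handled above: entries are
# ':'-separated, each one a '~'-separated unixtime~p10~p50~p90 tuple
# (field meaning inferred from the forecastData INSERT; the sample
# values are made up).
def _example_parse_forecast_data():
    forecastData = ':1588629600~10~50~90:1588633200~12~55~95'
    records = [entry.split('~') for entry in forecastData.split(':') if entry]
    assert records[0] == ['1588629600', '10', '50', '90']
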
def test_store_overwritingNsValues(self):
    s = remoteuserinfo.RemoteUserInfo(self.datadir)
    token = s.store(ns(dato1='valor1'), dato1='valor2')
    stored = ns.load(s._tokenfile(token))
    self.assertEqual(stored.dato1, 'valor2')

def retrieve(self, token):
    filename = self._tokenfile(token)
    try:
        return ns.load(filename)
    except IOError:
        raise packaging.NoSuchUuid(token)

def test_store(self):
    s = remoteuserinfo.RemoteUserInfo(self.datadir)
    token = s.store(dato1='valor1')
    stored = ns.load(s._tokenfile(token))
    self.assertEqual(stored.dato1, 'valor1')

def createPlantmonitorDB(self):
    configdb = ns.load('conf/configdb_test.yaml')
    return PlantmonitorDB(configdb)

def createConfig(self):
    configdb = ns.load('conf/configdb_test.yaml')
    # TODO: add dump database setting (if we end up not replicating it)
    configdb['psql_db'] = "testdump"
    return configdb

def createConfig(self):
    return ns.load('conf/configdb_test.yaml')

def update_kwh(filename):
    if filename:
        aggr_name = ns.load(filename)['generationkwh']['name']
        aggr_id = getAggregator(aggr_name)
        aggr_obj.update_kwh(aggr_id)

    '''
    Function taken from: webforms/webforms/model.py --> new_soci()
    Returns the Somenergia soci, or creates it if it does not exist.
    '''
    soci_ids = t.SomenergiaSoci.search([
        ('partner_id', '=', partner_id),
    ])
    if soci_ids:
        return soci_ids[0]
    return t.SomenergiaSoci.create_one_soci(partner_id)


if __name__ == '__main__':
    step('Loading config file...')
    try:
        config = ns.load("config.yaml")
    except Exception:
        error("Check config.yaml")
        raise
    step("Find and create 'socis' with no record in somenergia_soci")
    find_and_fix_soci_record(
        config.query_no_record_socis.sql,
        config.query_no_record_socis.output)
    step("Get socis considering: {}", config.queryfile1.sql)
    migrate_socis(config, config.queryfile1.sql, config.queryfile1.output)
    step("Migration completed!")

def config():
    if config.data is None or debug is True:
        config.data = ns()
        for configfile in sys.argv[1:] or ['scripts.yaml']:
            config.data.update(ns.load(configfile))
    return config.data

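# Usage sketch for the memoized config() above; the module is assumed to
# initialize `config.data = None` right after the definition (implied by
# the `is None` guard), so repeated calls reuse the loaded namespace.
config.data = None  # module-level initialization implied by the guard above

def _example_config():
    first = config()   # loads scripts.yaml or the files given in sys.argv
    second = config()  # cached: the same namespace is returned
    assert first is second
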
def importPlantsFromFile(db, yamlFilename):
    nsplants = ns.load(yamlFilename)
    with orm.db_session:
        importPlants(db, nsplants)