def test_drop_measurement(self):
    """Test drop measurement for TestInfluxDBClient object."""
    cli = DataFrameClient(database='db')
    with requests_mock.Mocker() as mock:
        # Any POST to /query answers with an empty result set.
        mock.register_uri(
            requests_mock.POST,
            "http://localhost:8086/query",
            text='{"results":[{}]}',
        )
        cli.drop_measurement('new_measurement')
        # The client must have issued a DROP MEASUREMENT statement.
        issued_query = mock.last_request.qs['q'][0]
        self.assertEqual(issued_query, 'drop measurement "new_measurement"')
def test_drop_measurement(self):
    """Test drop measurement for TestInfluxDBClient object."""
    client = DataFrameClient(database='db')
    with requests_mock.Mocker() as mocker:
        # Stub the /query endpoint with an empty InfluxDB result.
        mocker.register_uri(requests_mock.POST,
                            "http://localhost:8086/query",
                            text='{"results":[{}]}')
        client.drop_measurement('new_measurement')
        # Verify the exact query string sent over the wire.
        self.assertEqual(
            mocker.last_request.qs['q'][0],
            'drop measurement "new_measurement"',
        )
def write_data_to_influxdb(data):
    """Replace the "dus" measurement in the corona_data database with *data*.

    Drops the existing "dus" measurement, writes the given DataFrame in its
    place, and always releases the client connection — the original leaked
    the HTTP session when drop/write raised.

    :param data: pandas DataFrame accepted by DataFrameClient.write_points.
    """
    influx_client = DataFrameClient("localhost", 8087, "admin", "admin",
                                    "corona_data")
    try:
        influx_client.drop_measurement("dus")
        influx_client.write_points(data, "dus")
    finally:
        # Close even when the write fails, so the session is not leaked.
        influx_client.close()
def _query_order_book(client, measurement, start, first_t, timestep,
                      parse_profile=False):
    """Fetch one order book and rescale startby/endby to optimizer timesteps.

    :param client: open DataFrameClient.
    :param measurement: InfluxDB measurement name ('bbook'/'sbook'/'dbook').
    :param start: reference "now" datetime for the 5min..24h query window.
    :param first_t: timestamp of the first uncontrolled-demand sample (t=0).
    :param timestep: optimizer steps per hour (e.g. 12 for 5 min intervals).
    :param parse_profile: when True, decode the stringified 'profile_kw'
        column into a list of floats (used by the deferrable book).
    :return: DataFrame indexed by integer order id, or an empty DataFrame
        when there are no matching orders (best-effort, as before).
    """
    window_start = int((start + timedelta(minutes=5)).timestamp() * 1000)
    window_end = int((start + timedelta(hours=24)).timestamp() * 1000)
    query = ("select * from " + measurement
             + " WHERE startby >= " + str(window_start)
             + " AND endby <= " + str(window_end))
    try:
        book = client.query(query)[measurement].copy()
        # Convert absolute millisecond timestamps into integer optimization
        # timesteps relative to t=0 (first uncontrolled-demand sample).
        for column in ('startby', 'endby'):
            book[column] -= first_t.timestamp() * 1000
            book[column] /= 60 * 1000 * 60 / timestep
        book['id'] = list(range(0, len(book)))
        book.set_index('id', drop=True, inplace=True)
        if parse_profile:
            # profile_kw is stored as a stringified list, e.g. "[1.0, 2.0]".
            book['profile_kw'] = book['profile_kw'].apply(
                lambda x: [float(v)
                           for v in x[1:][:-1].replace(" ", "").split(',')])
        return book
    except Exception:
        # No orders at the moment (an absent measurement raises KeyError).
        # Was a bare except: narrowed so Ctrl-C / SystemExit still propagate.
        return pandas.DataFrame()


def _write_schedule(client, measurement, schedule, index):
    """Replace *measurement* with *schedule* re-indexed onto *index*.

    Always drops the previous schedule first; writes nothing when
    *schedule* is None (no such flexibility in this optimization run).
    """
    client.drop_measurement(measurement)
    if schedule is None:
        return
    out = schedule.copy()
    out['index'] = index
    out.set_index('index', drop=True, inplace=True)
    out.rename_axis(None, inplace=True)
    client.write_points(out, measurement)


def optimization():
    """Run the 24h self-consumption optimization and persist the schedules.

    Reads the uncontrolled demand and the three order books (battery,
    shapeable, deferrable) from InfluxDB, runs
    maximize_self_consumption(), then writes the total controlled demand
    and the per-flexibility schedules back, replacing previous ones.
    """
    # Optimization timestep: 12 steps per hour (5 min interval, 60/5).
    TIMESTEP = 12

    client = DataFrameClient(host, port, user, password, dbname)
    start = datetime.now()

    # Query uncontrolled demand; it is already on a 5 min timestep.
    # NOTE(review): raises (as before) when the 'uncontr' series is absent.
    query = ("select * from uncontr "
             + "WHERE time >= '"
             + (start + timedelta(minutes=5)).strftime("%Y-%m-%dT%H:%M:%SZ")
             + "' AND time <= '"
             + (start + timedelta(hours=24)).strftime("%Y-%m-%dT%H:%M:%SZ")
             + "'")
    uncontr = client.query(query)['uncontr']

    # Reference t=0 and the original datetime index (reused when writing
    # schedules back on their real timestamps).
    first_t = uncontr.iloc[0].name
    uncontr_t = uncontr.index

    # Re-index the demand onto integers and rename the column for the solver.
    opt_uncontr = uncontr.copy()
    opt_uncontr['index'] = list(range(0, len(uncontr_t)))
    opt_uncontr.set_index('index', drop=True, inplace=True)
    opt_uncontr.rename(columns={'uncontr': 'p'}, inplace=True)

    # Query the battery / shapeable / deferrable order books (best-effort:
    # each falls back to an empty DataFrame when there are no orders).
    opt_bbook = _query_order_book(client, 'bbook', start, first_t, TIMESTEP)
    opt_sbook = _query_order_book(client, 'sbook', start, first_t, TIMESTEP)
    opt_dbook = _query_order_book(client, 'dbook', start, first_t, TIMESTEP,
                                  parse_profile=True)

    # Run the optimization and log how long the solver took.
    tic = datetime.now()
    result = maximize_self_consumption(
        opt_uncontr, opt_bbook, opt_sbook, opt_dbook,
        timestep=1 / TIMESTEP, solver='glpk', verbose=False, timelimit=60)
    logger.info('GLPK time elapsed (hh:mm:ss.ms) {}'.format(
        datetime.now() - tic))

    # Save results back to influxDB (and remove the previous schedules).
    total = uncontr.copy()
    total.rename(columns={'uncontr': 'contr'}, inplace=True)
    total['contr'] += result['demand_controllable']
    client.write_points(total, 'contr')

    # Battery schedule is the net charge (in minus out) when present.
    _write_schedule(client, 'bschedule',
                    None if result['batteryin'] is None
                    else result['batteryin'] - result['batteryout'],
                    uncontr_t)
    _write_schedule(client, 'sschedule', result['demandshape'], uncontr_t)
    _write_schedule(client, 'dschedule', result['demanddeferr'], uncontr_t)

    # Close DB connection.
    client.close()