def main(test=False): """This function is the main body of the SDS Time Series sample script""" exception = None try: config = configparser.ConfigParser() config.read('config.ini') namespace_id = config.get('Configurations', 'Namespace') # step 1 ocs_client: OCSClient = OCSClient( config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret'), False) # step 2 print('Creating value and time type') time_value_type = get_type_value_time() time_value_type = ocs_client.Types.getOrCreateType( namespace_id, time_value_type) # step 3 print('Creating a stream for pressure and temperature') pressure_stream = SdsStream( id=STREAM_PRESSURE_NAME, typeId=time_value_type.Id, description="A stream for pressure data of tank1") ocs_client.Streams.createOrUpdateStream(namespace_id, pressure_stream) temperature_stream = SdsStream( id=STREAM_TEMP_NAME, typeId=time_value_type.Id, description="A stream for temperature data of tank1") ocs_client.Streams.createOrUpdateStream(namespace_id, temperature_stream) # step 4 ocs_client.Streams.insertValues(namespace_id, pressure_stream.Id, json.dumps((get_pressure_data()))) ocs_client.Streams.insertValues(namespace_id, temperature_stream.Id, json.dumps((get_temperature_data()))) # step 5 print('Creating a tank type that has both stream and temperature') tank_type = get_type_press_temp_time() tank_type = ocs_client.Types.getOrCreateType(namespace_id, tank_type) # step 6 print('Creating a tank stream') tank_stream = SdsStream(id=STREAM_TANK_1, typeId=tank_type.Id, description="A stream for data of tank1s") ocs_client.Streams.createOrUpdateStream(namespace_id, tank_stream) # step 7 ocs_client.Streams.insertValues(namespace_id, STREAM_TANK_1, json.dumps(get_data())) print() print() print('Looking at the data in the system. 
In this case we have some' 'null values that are encoded as 0 for the value.') data = get_data() tank_1_sorted = sorted(data, key=lambda x: x['time'], reverse=False) print() print('Value we sent:') print(tank_1_sorted[1]) first_time = tank_1_sorted[0]['time'] last_time = tank_1_sorted[-1]['time'] # step 8 results = ocs_client.Streams.getWindowValues(namespace_id, STREAM_PRESSURE_NAME, None, first_time, last_time) print() print('Value from pressure stream:') print((results)[1]) print() print('Value from tank1 stream:') results = ocs_client.Streams.getWindowValues(namespace_id, STREAM_TANK_1, None, first_time, last_time) print((results)[1]) # step 9 print() print() print("turning on verbosity") ocs_client.acceptverbosity = True print("This means that will get default values back (in our case" " 0.0 since we are looking at doubles)") print() print('Value from pressure stream:') results = ocs_client.Streams.getWindowValues(namespace_id, STREAM_PRESSURE_NAME, None, first_time, last_time) print((results)[1]) print() print('Value from tank1 stream:') results = ocs_client.Streams.getWindowValues(namespace_id, STREAM_TANK_1, None, first_time, last_time) print((results)[1]) # step 10 print() print() print("Getting data summary") # the count of 1 refers to the number of intervals requested summary_results = ocs_client.Streams.getSummaries( namespace_id, STREAM_TANK_1, None, first_time, last_time, 1) print(summary_results) print() print() print('Now we want to look at data across multiple tanks.') print('For that we can take advantage of bulk stream calls') print('Creating new tank streams') tank_stream = SdsStream(id=STREAM_TANK_2, typeId=tank_type.Id, description="A stream for data of tank2") ocs_client.Streams.createOrUpdateStream(namespace_id, tank_stream) data_tank_2 = get_data_tank_2() ocs_client.Streams.insertValues(namespace_id, STREAM_TANK_2, json.dumps(get_data_tank_2())) tank_2_sorted = sorted(data_tank_2, key=lambda x: x['time'], reverse=False) first_time_tank_2 = tank_2_sorted[0]['time'] last_time_tank_2 = tank_2_sorted[-1]['time'] tank_stream = SdsStream(id=STREAM_TANK_0, typeId=tank_type.Id, description="") ocs_client.Streams.createOrUpdateStream(namespace_id, tank_stream) ocs_client.Streams.insertValues(namespace_id, STREAM_TANK_0, json.dumps(get_data())) time.sleep(10) # step 11 print('Getting bulk call results') results = ocs_client.Streams.getStreamsWindow( namespace_id, [STREAM_TANK_0, STREAM_TANK_2], None, first_time_tank_2, last_time_tank_2) print(results) except Exception as ex: exception = ex print(f"Encountered Error: {ex}") print() finally: # step 12 print() print() print() print("Cleaning up") print("Deleting the stream") suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, STREAM_PRESSURE_NAME)) suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, STREAM_TEMP_NAME)) suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, STREAM_TANK_0)) suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, STREAM_TANK_1)) suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, STREAM_TANK_2)) print("Deleting the types") suppress_error(lambda: ocs_client.Types.deleteType( namespace_id, TYPE_PRESSURE_TEMPERATURE_TIME_NAME)) suppress_error(lambda: ocs_client.Types.deleteType( namespace_id, TYPE_VALUE_TIME_NAME)) if test and exception is not None: raise exception print('Complete!')
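# The cleanup calls above rely on a suppress_error helper that is defined
# elsewhere in this sample. A minimal sketch of what it might look like
# (an assumption, not the confirmed implementation): run the SDS call and
# log, rather than raise, any failure so cleanup can continue.
def suppress_error(sds_call):
    """Invoke an SDS call and print, rather than raise, any failure."""
    try:
        sds_call()
    except Exception as error:
        print(f'Encountered Error: {error}')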
def main(test=False): """This function is the main body of the SDS sample script""" exception = None try: config = configparser.ConfigParser() config.read('config.ini') # Step 1 tenant_id = config.get('Access', 'Tenant') namespace_id = config.get('Configurations', 'Namespace') if tenant_id == 'default': sds_client = EDSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Resource')) else: sds_client = OCSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret')) namespace_id = config.get('Configurations', 'Namespace') print(r"------------------------------------------") print(r" _________ .___ __________ ") print(r" / _____/ __| _/_____\______ \___.__.") print(r" \_____ \ / __ |/ ___/| ___< | |") print(r" / \/ /_/ |\___ \ | | \___ |") print(r"/_______ /\____ /____ >|____| / ____|") print(r" \/ \/ \/ \/ ") print(r"------------------------------------------") print("Sds endpoint at {url}".format(url=sds_client.uri)) print() # Step 2 ####################################################################### # SdsType get or creation ####################################################################### print("Creating an SdsType") wave_type = get_wave_data_type(SAMPLE_TYPE_ID) wave_type = sds_client.Types.getOrCreateType(namespace_id, wave_type) assert wave_type.Id == SAMPLE_TYPE_ID, "Error getting back wave Type" # Step 3 ####################################################################### # Sds Stream creation ####################################################################### print("Creating an SdsStream") stream = SdsStream() stream.Id = SAMPLE_STREAM_ID stream.Name = "WaveStreamPySample" stream.Description = "A Stream to store the WaveData events" stream.TypeId = wave_type.Id sds_client.Streams.createOrUpdateStream(namespace_id, stream) # Step 4 ####################################################################### # CRUD operations for events ####################################################################### print("Inserting data") # Insert a single event event = next_wave(0, 2.0) sds_client.Streams.insertValues(namespace_id, stream.Id, [event]) # Insert a list of events waves = [] for error in range(2, 20, 2): waves.append(next_wave(error, 2.0)) sds_client.Streams.insertValues(namespace_id, stream.Id, waves) # Step 5 # Get the last inserted event in a stream print("Getting latest event") wave = sds_client.Streams.getLastValue(namespace_id, stream.Id, WaveData) print(to_string(wave)) print() # Get all the events waves = sds_client.Streams.getWindowValues(namespace_id, stream.Id, WaveData, 0, 180) print("Getting all events") print("Total events found: " + str(len(waves))) for wave in waves: print(to_string(wave)) print() # Step 6 # get all values with headers waves = sds_client.Streams.getWindowValuesForm(namespace_id, stream.Id, None, 0, 180, "tableh") print("Getting all events in table format") print(waves) # Step 7 print("Updating events") # Update the first event event = next_wave(0, 4.0) sds_client.Streams.updateValues(namespace_id, stream.Id, [event]) # Update the rest of the events, adding events that have no prior # index entry updated_events = [] for error in range(2, 40, 2): event = next_wave(error, 4.0) updated_events.append(event) sds_client.Streams.updateValues(namespace_id, stream.Id, updated_events) # Get all the events waves = sds_client.Streams.getWindowValues(namespace_id, stream.Id, WaveData, 0, 40) print("Getting 
updated events") print("Total events found: " + str(len(waves))) for wave in waves: print(to_string(wave)) print() # Step 8 print("Replacing events") # replace one value event = next_wave(0, 5.0) sds_client.Streams.replaceValues(namespace_id, stream.Id, [event]) # replace multiple values replaced_events = [] for error in range(2, 40, 2): event = next_wave(error, 5.0) replaced_events.append(event) sds_client.Streams.replaceValues(namespace_id, stream.Id, replaced_events) # Step 9 # Get all the events waves = sds_client.Streams.getWindowValues(namespace_id, stream.Id, WaveData, 0, 180) print("Getting replaced events") print("Total events found: " + str(len(waves))) for wave in waves: print(to_string(wave)) print() retrieved_interpolated = sds_client.Streams.getRangeValuesInterpolated( namespace_id, stream.Id, None, "5", "32", 4) print("Sds can interpolate or extrapolate data at an index location " "where data does not explicitly exist:") print(retrieved_interpolated) print() # Step 10 # Filtering from all values print("Getting filtered events") filtered_events = sds_client.Streams.getWindowValues( namespace_id, SAMPLE_STREAM_ID, WaveData, 0, 50, 'Radians lt 3') print("Total events found: " + str(len(filtered_events))) for wave in filtered_events: print(to_string(wave)) print() # Step 11 # Sampling from all values print("Getting sampled values") sampled_waves = sds_client.Streams.getSampledValues( namespace_id, stream.Id, WaveData, 0, 40, "sin", 4) print("Total events found: " + str(len(sampled_waves))) for wave in sampled_waves: print(to_string(wave)) print() # Step 12 ####################################################################### # Property Overrides ####################################################################### print("Property Overrides") print("Sds can interpolate or extrapolate data at an index location " "where data does not explicitly exist:") print() # We will retrieve three events using the default behavior, Continuous waves = sds_client.Streams.getRangeValues( namespace_id, stream.Id, WaveData, "1", 0, 3, False, SdsBoundaryType.ExactOrCalculated) print("Default (Continuous) requesting data starting at index location" " '1', where we have not entered data, Sds will interpolate a " "value for each property:") for wave in waves: print(("Order: {order}: Radians: {radians} Cos: {cos}".format( order=wave.order, radians=wave.radians, cos=wave.cos))) # Create a Discrete stream PropertyOverride indicating that we do not # want Sds to calculate a value for Radians and update our stream property_override = SdsStreamPropertyOverride() property_override.SdsTypePropertyId = 'Radians' property_override.InterpolationMode = 3 # update the stream props = [property_override] stream.PropertyOverrides = props sds_client.Streams.createOrUpdateStream(namespace_id, stream) waves = sds_client.Streams.getRangeValues( namespace_id, stream.Id, WaveData, "1", 0, 3, False, SdsBoundaryType.ExactOrCalculated) print() print("We can override this read behavior on a property by property" "basis, here we override the Radians property instructing Sds" " not to interpolate.") print("Sds will now return the default value for the data type:") for wave in waves: print(("Order: {order}: Radians: {radians} Cos: {cos}".format( order=wave.order, radians=wave.radians, cos=wave.cos))) # Step 13 ####################################################################### # Stream Views ####################################################################### # Create additional types to define our targets 
wave_target_type = get_wave_data_target_type(SAMPLE_TARGET_TYPE_ID) wave_target_type = sds_client.Types.getOrCreateType( namespace_id, wave_target_type) wave_integer_type = get_wave_data_integer_type(SAMPLE_INTEGER_TYPE_ID) wave_integer_type = sds_client.Types.getOrCreateType( namespace_id, wave_integer_type) # Create an SdsStreamViewProperty objects when we want to explicitly # map one property to another vp1 = SdsStreamViewProperty() vp1.SourceId = "Order" vp1.TargetId = "OrderTarget" vp2 = SdsStreamViewProperty() vp2.SourceId = "Sin" vp2.TargetId = "SinInt" vp3 = SdsStreamViewProperty() vp3.SourceId = "Cos" vp3.TargetId = "CosInt" vp4 = SdsStreamViewProperty() vp4.SourceId = "Tan" vp4.TargetId = "TanInt" # Create a streamView mapping our original type to our target type, # data shape is the same so let Sds handle the mapping stream_view = SdsStreamView() stream_view.Id = SAMPLE_STREAM_VIEW_ID stream_view.Name = "SampleStreamView" stream_view.TargetTypeId = wave_target_type.Id stream_view.SourceTypeId = wave_type.Id # Data shape and data types are different so include explicit mappings # between properties manual_stream_view = SdsStreamView() manual_stream_view.Id = SAMPLE_STREAM_VIEW_INT_ID manual_stream_view.Name = "SampleIntStreamView" manual_stream_view.TargetTypeId = wave_integer_type.Id manual_stream_view.SourceTypeId = wave_type.Id manual_stream_view.Properties = [vp1, vp2, vp3, vp4] automatic_stream_view = sds_client.Streams.getOrCreateStreamView( namespace_id, stream_view) manual_stream_view = sds_client.Streams.getOrCreateStreamView( namespace_id, manual_stream_view) stream_view_map_1 = SdsStreamViewMap() stream_view_map_1 = sds_client.Streams.getStreamViewMap( namespace_id, automatic_stream_view.Id) stream_view_map_2 = SdsStreamViewMap() stream_view_map_2 = sds_client.Streams.getStreamViewMap( namespace_id, manual_stream_view.Id) range_waves = sds_client.Streams.getRangeValues( namespace_id, stream.Id, WaveData, "1", 0, 3, False, SdsBoundaryType.ExactOrCalculated) print() print("SdsStreamViews") print("Here is some of our data as it is stored on the server:") for way in range_waves: print(("Sin: {sin}, Cos: {cos}, Tan: {tan}".format(sin=way.sin, cos=way.cos, tan=way.tan))) # StreamView data when retrieved with a streamView range_waves = sds_client.Streams.getRangeValues( namespace_id, stream.Id, WaveDataTarget, "1", 0, 3, False, SdsBoundaryType.ExactOrCalculated, automatic_stream_view.Id) print() print("Specifying a streamView with an SdsType of the same shape" "returns values that are automatically mapped to the target" " SdsType's properties:") for way in range_waves: print(("SinTarget: {sinTarget}, CosTarget: {cosTarget}, TanTarget:" " {tanTarget}").format(sinTarget=way.sin_target, cosTarget=way.cos_target, tanTarget=way.tan_target)) range_waves = sds_client.Streams.getRangeValues( namespace_id, stream.Id, WaveDataInteger, "1", 0, 3, False, SdsBoundaryType.ExactOrCalculated, manual_stream_view.Id) print() print("SdsStreamViews can also convert certain types of data, here we" " return integers where the original values were doubles:") for way in range_waves: print(( "SinInt: {sinInt}, CosInt: {cosInt}, TanInt: {tanInt}").format( sinInt=way.sin_int, cosInt=way.cos_int, tanInt=way.tan_int)) print() print("We can query Sds to return the SdsStreamViewMap for our " "SdsStreamView, here is the one generated automatically:") for prop in stream_view_map_1.Properties: print(("{source} => {dest}".format(source=prop.SourceId, dest=prop.TargetId))) print() print("Here is our explicit 
mapping, note SdsStreamViewMap will return" " all properties of the Source Type, even those without a " "corresponding Target property:") for prop in stream_view_map_2.Properties: if hasattr(prop, 'TargetId'): print(("{source} => {dest}".format(source=prop.SourceId, dest=prop.TargetId))) else: print(("{source} => {dest}".format(source=prop.SourceId, dest='Not mapped'))) # Step 14 print("We will now update the stream type based on the streamview") first_val = sds_client.Streams.getFirstValue(namespace_id, stream.Id, None) sds_client.Streams.updateStreamType(namespace_id, stream.Id, SAMPLE_STREAM_VIEW_ID) new_stream = sds_client.Streams.getStream(namespace_id, SAMPLE_STREAM_ID) first_val_updated = sds_client.Streams.getFirstValue( namespace_id, SAMPLE_STREAM_ID, None) print("The new type id" + new_stream.TypeId + " compared to the " "original one " + stream.TypeId) print("The new type value " + str(first_val) + " compared to the " "original one " + str(first_val_updated)) # Step 15 types = sds_client.Types.getTypes(namespace_id, 0, 100) types_query = sds_client.Types.getTypes(namespace_id, 0, 100, "Id:*Target*") print() print("All Types: ") for type_i in types: print(type_i.Id) print("Types after Query: ") for type_i in types_query: print(type_i.Id) if tenant_id != 'default': # Step 16 ####################################################################### # Tags and Metadata (OCS ONLY) ####################################################################### print() print("Let's add some Tags and Metadata to our stream:") tags = ["waves", "periodic", "2018", "validated"] metadata = { "Region": "North America", "Country": "Canada", "Province": "Quebec" } sds_client.Streams.createOrUpdateTags(namespace_id, stream.Id, tags) sds_client.Streams.createOrUpdateMetadata(namespace_id, stream.Id, metadata) print() print("Tags now associated with ", stream.Id) print(sds_client.Streams.getTags(namespace_id, stream.Id)) region = sds_client.Streams.getMetadata(namespace_id, stream.Id, "Region") country = sds_client.Streams.getMetadata(namespace_id, stream.Id, "Country") province = sds_client.Streams.getMetadata(namespace_id, stream.Id, "Province") print() print("Metadata now associated with", stream.Id, ":") print("Metadata key Region: ", region) print("Metadata key Country: ", country) print("Metadata key Province: ", province) print() # Step 17 ####################################################################### # Update Metadata (OCS ONLY) ####################################################################### print() print("Let's update the Metadata on our stream:") patch = [{ "op": "remove", "path": "/Region" }, { "op": "replace", "path": "/Province", "value": "Ontario" }, { "op": "add", "path": "/City", "value": "Toronto" }] sds_client.Streams.patchMetadata(namespace_id, stream.Id, patch) country = sds_client.Streams.getMetadata(namespace_id, stream.Id, "Country") province = sds_client.Streams.getMetadata(namespace_id, stream.Id, "Province") city = sds_client.Streams.getMetadata(namespace_id, stream.Id, "City") print() print("Metadata now associated with", stream.Id, ":") print("Metadata key Country: ", country) print("Metadata key Province: ", province) print("Metadata key City: ", city) print() # Step 17 ####################################################################### # Delete events ####################################################################### print() print('Deleting values from the SdsStream') # remove a single value from the stream sds_client.Streams.removeValue(namespace_id, 
stream.Id, 0) # remove multiple values from the stream sds_client.Streams.removeWindowValues(namespace_id, stream.Id, 0, 40) try: event = sds_client.Streams.getLastValue(namespace_id, stream.Id, WaveData) if event is not None: raise ValueError except TypeError: pass print("All values deleted successfully!") # Step 18 print("Adding a stream with a secondary index.") index = SdsStreamIndex() index.SdsTypePropertyId = "Radians" secondary = SdsStream() secondary.Id = STREAM_ID_SECONDARY secondary.TypeId = SAMPLE_TYPE_ID secondary.Indexes = [index] secondary = sds_client.Streams.getOrCreateStream( namespace_id, secondary) count = 0 if stream.Indexes: count = len(stream.Indexes) print("Secondary indexes on streams original:" + str(count) + ". New one: " + str(len(secondary.Indexes))) print() # Modifying an existing stream with a secondary index. print("Modifying a stream to have a secondary index.") sample_stream = sds_client.Streams.getStream(namespace_id, SAMPLE_STREAM_ID) index = SdsStreamIndex() index.SdsTypePropertyId = "RadiansTarget" sample_stream.Indexws = [index] sds_client.Streams.createOrUpdateStream(namespace_id, sample_stream) sample_stream = sds_client.Streams.getStream(namespace_id, SAMPLE_STREAM_ID) # Modifying an existing stream to remove the secondary index print("Removing a secondary index from a stream.") secondary.Indexes = [] sds_client.Streams.createOrUpdateStream(namespace_id, secondary) secondary = sds_client.Streams.getStream(namespace_id, secondary.Id) original_length = "0" if stream.Indexes: original_length = str(len(stream.Indexes)) secondary_length = "0" if secondary.Indexes: secondary_length = str(len(secondary.Indexes)) print("Secondary indexes on streams original:" + original_length + ". New one: " + secondary_length) # Step 19 # Adding Compound Index Type print("Creating an SdsType with a compound index") type_compound = get_wave_compound_data_type(COMPOUND_TYPE_ID) sds_client.Types.getOrCreateType(namespace_id, type_compound) # create an SdsStream print("Creating an SdsStream off of type with compound index") stream_compound = SdsStream() stream_compound.Id = STREAM_ID_COMPOUND stream_compound.TypeId = type_compound.Id sds_client.Streams.createOrUpdateStream(namespace_id, stream_compound) # Step 20 print("Inserting data") waves = [] waves.append(next_wave(1, 10)) waves.append(next_wave(2, 2)) waves.append(next_wave(3, 1)) waves.append(next_wave(10, 3)) waves.append(next_wave(10, 8)) waves.append(next_wave(10, 10)) sds_client.Streams.insertValues(namespace_id, STREAM_ID_COMPOUND, waves) latest_compound = sds_client.Streams.getLastValue( namespace_id, STREAM_ID_COMPOUND, None) first_compound = sds_client.Streams.getFirstValue( namespace_id, STREAM_ID_COMPOUND, None) window_val = sds_client.Streams.getWindowValues( namespace_id, STREAM_ID_COMPOUND, None, "2|1", "10|8") print("First data: " + str(first_compound) + " Latest data: " + str(latest_compound)) print("Window Data:") print(str(window_val)) except Exception as error: print((f'Encountered Error: {error}')) print() traceback.print_exc() print() exception = error finally: # Step 21 ####################################################################### # SdsType, SdsStream, and SdsStreamView deletion ####################################################################### # Clean up the remaining artifacts print("Cleaning up") print("Deleting the stream") suppress_error(lambda: sds_client.Streams.deleteStream( namespace_id, SAMPLE_STREAM_ID)) suppress_error(lambda: sds_client.Streams.deleteStream( namespace_id, 
STREAM_ID_SECONDARY))
        suppress_error(lambda: sds_client.Streams.deleteStream(
            namespace_id, STREAM_ID_COMPOUND))

        print("Deleting the streamViews")
        suppress_error(lambda: sds_client.Streams.deleteStreamView(
            namespace_id, SAMPLE_STREAM_VIEW_ID))
        suppress_error(lambda: sds_client.Streams.deleteStreamView(
            namespace_id, SAMPLE_STREAM_VIEW_INT_ID))

        print("Deleting the types")
        suppress_error(
            lambda: sds_client.Types.deleteType(namespace_id, SAMPLE_TYPE_ID))
        suppress_error(lambda: sds_client.Types.deleteType(
            namespace_id, SAMPLE_TARGET_TYPE_ID))
        suppress_error(lambda: sds_client.Types.deleteType(
            namespace_id, SAMPLE_INTEGER_TYPE_ID))
        suppress_error(lambda: sds_client.Types.deleteType(
            namespace_id, COMPOUND_TYPE_ID))

        if test and exception is not None:
            raise exception

    print('Complete!')
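# Both samples above read their settings from a config.ini file via configparser.
# Based on the keys requested with config.get(...), the file presumably has the
# following shape (placeholder values; in the second sample a Tenant of 'default'
# switches the client from OCSClient to EDSClient):
#
# [Configurations]
# Namespace = PLACEHOLDER_NAMESPACE
#
# [Access]
# Resource = https://dat-b.osisoft.com
# Tenant = PLACEHOLDER_TENANT_ID
# ApiVersion = v1
#
# [Credentials]
# ClientId = PLACEHOLDER_CLIENT_ID
# ClientSecret = PLACEHOLDER_CLIENT_SECRET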
def main(test=False): """This function is the main body of the Data View sample script""" exception = None config = configparser.ConfigParser() config.read('config.ini') print( '--------------------------------------------------------------------') print( ' ###### # # ###### # # ') print( ' # # ## ##### ## # # # ###### # # # # # # ') print( ' # # # # # # # # # # # # # # # # # ') print( ' # # # # # # # # # # ##### # # ###### # ') print( ' # # ###### # ###### # # # # # ## # # # ') print( ' # # # # # # # # # # # ## ## # # ') print( ' ###### # # # # # ## # ###### # # # # ') print( '--------------------------------------------------------------------') # Step 1 print() print('Step 1: Authenticate against OCS') ocs_client = OCSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret')) namespace_id = config.get('Configurations', 'Namespace') print(namespace_id) print(ocs_client.uri) try: # Step 2 print() print('Step 2: Create types, streams, and data') times = create_data(namespace_id, ocs_client) sample_start_time = times[0] sample_end_time = times[1] # Step 3 print() print('Step 3: Create a data view') dataview = DataView(id=SAMPLE_DATAVIEW_ID, name=SAMPLE_DATAVIEW_NAME, description=SAMPLE_DATAVIEW_DESCRIPTION) ocs_client.DataViews.postDataView(namespace_id, dataview) # Step 4 print() print('Step 4: Retrieve the data view') dataview = ocs_client.DataViews.getDataView(namespace_id, SAMPLE_DATAVIEW_ID) print(dataview.toJson()) # Step 5 print() print('Step 5: Add a query for data items') query = Query(id=SAMPLE_QUERY_ID, value=SAMPLE_QUERY_STRING) dataview.Queries.append(query) # No Data View returned, success is 204 ocs_client.DataViews.putDataView(namespace_id, dataview) # Step 6 print() print('Step 6: View items found by the query') print('List data items found by the query:') data_items = ocs_client.DataViews.getResolvedDataItems( namespace_id, SAMPLE_DATAVIEW_ID, SAMPLE_QUERY_ID) print(data_items.toJson()) print('List ineligible data items found by the query:') data_items = ocs_client.DataViews.getResolvedIneligibleDataItems( namespace_id, SAMPLE_DATAVIEW_ID, SAMPLE_QUERY_ID) print(data_items.toJson()) # Step 7 print() print('Step 7: View fields available to include in the data view') available_fields = ocs_client.DataViews.getResolvedAvailableFieldSets( namespace_id, SAMPLE_DATAVIEW_ID) print(available_fields.toJson()) # Step 8 print() print('Step 8: Include some of the available fields') dataview.DataFieldSets = available_fields.Items ocs_client.DataViews.putDataView(namespace_id, dataview) print('List available field sets:') available_fields = ocs_client.DataViews.getResolvedAvailableFieldSets( namespace_id, SAMPLE_DATAVIEW_ID) print(available_fields.toJson()) print('Retrieving data from the data view:') dataview_data = ocs_client.DataViews.getDataInterpolated( namespace_id=namespace_id, dataView_id=SAMPLE_DATAVIEW_ID, startIndex=sample_start_time, endIndex=sample_end_time, interval=SAMPLE_INTERVAL) print(str(dataview_data)) print(len(dataview_data)) assert len(dataview_data) > 0, 'Error getting data view data' # Step 9 print() print('Step 9: Group the data view') grouping = Field(source=FieldSource.Id, label='{DistinguisherValue} {FirstKey}') dataview.GroupingFields.append(grouping) # No DataView returned, success is 204 ocs_client.DataViews.putDataView(namespace_id, dataview) print('Retrieving data from the data view:') dataview_data = 
ocs_client.DataViews.getDataInterpolated( namespace_id=namespace_id, dataView_id=SAMPLE_DATAVIEW_ID, startIndex=sample_start_time, endIndex=sample_end_time, interval=SAMPLE_INTERVAL) print(str(dataview_data)) assert len(dataview_data) > 0, 'Error getting data view data' # Step 10 print() print('Step 10: Identify data items') identify = dataview.GroupingFields.pop() dataview_dataitem_fieldset = find_fieldset(dataview.DataFieldSets, SAMPLE_QUERY_ID) dataview_dataitem_fieldset.IdentifyingField = identify # No Data View returned, success is 204 ocs_client.DataViews.putDataView(namespace_id, dataview) print('Retrieving data from the data view:') dataview_data = ocs_client.DataViews.getDataInterpolated( namespace_id=namespace_id, dataView_id=SAMPLE_DATAVIEW_ID, startIndex=sample_start_time, endIndex=sample_end_time, interval=SAMPLE_INTERVAL) print(str(dataview_data)) assert len(dataview_data) > 0, 'Error getting data view data' # Step 11 print() print('Step 11: Consolidate data fields') field1 = find_field_key(dataview_dataitem_fieldset.DataFields, FieldSource.PropertyId, SAMPLE_FIELD_TO_CONSOLIDATE_TO) field2 = find_field_key(dataview_dataitem_fieldset.DataFields, FieldSource.PropertyId, SAMPLE_FIELD_TO_CONSOLIDATE) print(field1.toJson()) print(field2.toJson()) field1.Keys.append(SAMPLE_FIELD_TO_CONSOLIDATE) dataview_dataitem_fieldset.DataFields.remove(field2) # No Data View returned, success is 204 ocs_client.DataViews.putDataView(namespace_id, dataview) print('Retrieving data from the data view:') dataview_data = ocs_client.DataViews.getDataInterpolated( namespace_id=namespace_id, dataView_id=SAMPLE_DATAVIEW_ID, startIndex=sample_start_time, endIndex=sample_end_time, interval=SAMPLE_INTERVAL) print(str(dataview_data)) assert len(dataview_data) > 0, 'Error getting data view data' except Exception as error: print((f'Encountered Error: {error}')) print() traceback.print_exc() print() exception = error finally: ####################################################################### # Data View deletion ####################################################################### # Step 12 print() print('Step 12: Delete sample objects from OCS') print('Deleting data view...') suppress_error(lambda: ocs_client.DataViews.deleteDataView( namespace_id, SAMPLE_DATAVIEW_ID)) # check, including assert is added to make sure we deleted it dataview = None try: dataview = ocs_client.DataViews.getDataView( namespace_id, SAMPLE_DATAVIEW_ID) except Exception as error: # Exception is expected here since Data View has been deleted dataview = None finally: assert dataview is None, 'Delete failed' print('Verification OK: Data View deleted') print('Deleting sample streams...') suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, SAMPLE_STREAM_ID_1)) suppress_error(lambda: ocs_client.Streams.deleteStream( namespace_id, SAMPLE_STREAM_ID_2)) print('Deleting sample types...') suppress_error(lambda: ocs_client.Types.deleteType( namespace_id, SAMPLE_TYPE_ID_1)) suppress_error(lambda: ocs_client.Types.deleteType( namespace_id, SAMPLE_TYPE_ID_2)) if test and exception is not None: raise exception print('Complete!')
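# Steps 10 and 11 above call find_fieldset and find_field_key, helpers defined
# elsewhere in the sample. A minimal sketch of what they might look like,
# assuming each resolved field set exposes a QueryId and each field exposes
# Source and Keys attributes (attribute names are assumptions):
def find_fieldset(fieldsets, fieldset_query_id):
    """Return the field set resolved for the given query id, if any."""
    for fieldset in fieldsets:
        if fieldset.QueryId == fieldset_query_id:
            return fieldset
    return None


def find_field_key(fields, field_source, key):
    """Return the first field with the given source that contains the key."""
    for field in fields:
        if field.Source == field_source and any(k == key for k in field.Keys):
            return field
    return None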
from ocs_sample_library_preview import OCSClient, Streams
import configparser

config = configparser.ConfigParser()
config.read('config.ini')

client: OCSClient = OCSClient(config.get('Access', 'ApiVersion'),
                              config.get('Access', 'Tenant'),
                              config.get('Access', 'Resource'),
                              config.get('Credentials', 'ClientId'),
                              config.get('Credentials', 'ClientSecret'))

namespaceId = config.get('Configurations', 'Namespace')

# This annotation isn't strictly necessary, but it lets VS Code resolve
# IntelliSense for the Streams client. It is only needed for local testing,
# not when this module is imported; there may be an editor setting that
# makes it unnecessary.
sss: Streams = client.Streams

streams = client.Streams.getStreams(namespaceId)
for stream in streams:
    print(stream.toJson())
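# A hedged variant of the listing above: getStreams presumably also accepts a
# search query plus skip/count paging parameters, which keeps large namespaces
# manageable. The parameter names are assumptions, not confirmed by this snippet.
for stream in client.Streams.getStreams(namespaceId, query='name:*Sample*',
                                         skip=0, count=100):
    print(stream.toJson())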
def main(): global namespaceId, streamPressureName, streamTempName, exception try: config = configparser.ConfigParser() config.read('config.ini') namespaceId = config.get('Configurations', 'Namespace') # step 1 ocsClient: OCSClient = OCSClient( config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret'), False) # step 2 print('Creating value and time type') timeValueType = GetType_ValueTime() timeValueType = ocsClient.Types.getOrCreateType( namespaceId, timeValueType) # step 3 print('Creating a stream for pressure and temperature') pressureStream = SdsStream( id=streamPressureName, typeId=timeValueType.Id, description="A stream for pressure data of tank1") ocsClient.Streams.createOrUpdateStream(namespaceId, pressureStream) temperatureStream = SdsStream( id=streamTempName, typeId=timeValueType.Id, description="A stream for temperature data of tank1") ocsClient.Streams.createOrUpdateStream(namespaceId, temperatureStream) # step 4 ocsClient.Streams.insertValues(namespaceId, pressureStream.Id, json.dumps((GetPressureData()))) ocsClient.Streams.insertValues(namespaceId, temperatureStream.Id, json.dumps((GetTemperatureData()))) # step 5 print('Creating a tank type that has both stream and temperature') tankType = GetType_PressTempTime() tankType = ocsClient.Types.getOrCreateType(namespaceId, tankType) # step 6 print('Creating a tank stream') tankStream = SdsStream(id=streamTank1, typeId=tankType.Id, description="A stream for data of tank1s") ocsClient.Streams.createOrUpdateStream(namespaceId, tankStream) # step 7 ocsClient.Streams.insertValues(namespaceId, streamTank1, json.dumps(GetData())) print() print() print('Looking at the data in the system. 
In this case we have some' 'null values that are encoded as 0 for the value.') data = GetData() tank1Sorted = sorted(data, key=lambda x: x['time'], reverse=False) print() print('Value we sent:') print(tank1Sorted[1]) firstTime = tank1Sorted[0]['time'] lastTime = tank1Sorted[-1]['time'] # step 8 results = ocsClient.Streams.getWindowValues(namespaceId, streamPressureName, None, firstTime, lastTime) print() print('Value from pressure stream:') print((results)[1]) print() print('Value from tank1 stream:') results = ocsClient.Streams.getWindowValues(namespaceId, streamTank1, None, firstTime, lastTime) print((results)[1]) # step 9 print() print() print("turning on verbosity") ocsClient.acceptverbosity = True print("This means that will get default values back (in our case" " 0.0 since we are looking at doubles)") print() print('Value from pressure stream:') results = ocsClient.Streams.getWindowValues(namespaceId, streamPressureName, None, firstTime, lastTime) print((results)[1]) print() print('Value from tank1 stream:') results = ocsClient.Streams.getWindowValues(namespaceId, streamTank1, None, firstTime, lastTime) print((results)[1]) # step 10 print() print() print("Getting data summary") # the count of 1 refers to the number of intervals requested summaryResults = ocsClient.Streams.getSummaries( namespaceId, streamTank1, None, firstTime, lastTime, 1) print(summaryResults) print() print() print('Now we want to look at data across multiple tanks.') print('For that we can take advantage of bulk stream calls') print('Creating new tank streams') tankStream = SdsStream(id=streamTank2, typeId=tankType.Id, description="A stream for data of tank2") ocsClient.Streams.createOrUpdateStream(namespaceId, tankStream) dataTank2 = GetData_Tank2() ocsClient.Streams.insertValues(namespaceId, streamTank2, json.dumps(GetData_Tank2())) tank2Sorted = sorted(dataTank2, key=lambda x: x['time'], reverse=False) firstTimeTank2 = tank2Sorted[0]['time'] lastTimeTank2 = tank2Sorted[-1]['time'] tankStream = SdsStream(id=streamTank0, typeId=tankType.Id, description="") ocsClient.Streams.createOrUpdateStream(namespaceId, tankStream) ocsClient.Streams.insertValues(namespaceId, streamTank0, json.dumps(GetData())) time.sleep(10) # step 11 print('Getting bulk call results') results = ocsClient.Streams.getStreamsWindow( namespaceId, [streamTank0, streamTank2], None, firstTimeTank2, lastTimeTank2) print(results) except Exception as ex: exception = ex print(f"Encountered Error: {ex}") print() finally: # step 12 print() print() print() print("Cleaning up") print("Deleting the stream") supressError(lambda: ocsClient.Streams.deleteStream( namespaceId, streamPressureName)) supressError(lambda: ocsClient.Streams.deleteStream( namespaceId, streamTempName)) supressError( lambda: ocsClient.Streams.deleteStream(namespaceId, streamTank0)) supressError( lambda: ocsClient.Streams.deleteStream(namespaceId, streamTank1)) supressError( lambda: ocsClient.Streams.deleteStream(namespaceId, streamTank2)) print("Deleting the types") supressError(lambda: ocsClient.Types.deleteType( namespaceId, typePressureTemperatureTimeName)) supressError( lambda: ocsClient.Types.deleteType(namespaceId, typeValueTimeName)) if (exception): raise exception
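# GetType_ValueTime above is defined elsewhere in this sample. A minimal sketch
# of what it might look like, built in the same attribute-assignment style the
# sample uses for SdsStream. The import and the SdsType/SdsTypeProperty
# attribute names are assumptions, not confirmed by this file.
from ocs_sample_library_preview import SdsType, SdsTypeCode, SdsTypeProperty


def GetType_ValueTime():
    """Build an SdsType with a DateTime index 'time' and a double 'value'."""
    doubleType = SdsType()
    doubleType.Id = 'doubleType'
    doubleType.SdsTypeCode = SdsTypeCode.Double

    dateTimeType = SdsType()
    dateTimeType.Id = 'dateTimeType'
    dateTimeType.SdsTypeCode = SdsTypeCode.DateTime

    valueProp = SdsTypeProperty()
    valueProp.Id = 'value'
    valueProp.SdsType = doubleType

    timeProp = SdsTypeProperty()
    timeProp.Id = 'time'
    timeProp.IsKey = True
    timeProp.SdsType = dateTimeType

    valueTime = SdsType()
    valueTime.Id = typeValueTimeName
    valueTime.SdsTypeCode = SdsTypeCode.Object
    valueTime.Properties = [valueProp, timeProp]
    return valueTime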
def main(test=False): global namespaceId success = True exception = {} try: print( "--------------------------------------------------------------------" ) print( " ###### ###### # # " ) print( " # # ## ##### ## # # # ###### # # # # # # " ) print( " # # # # # # # # # # # # # # # # # " ) print( " # # # # # # # # # # ##### # # ###### # " ) print( " # # ###### # ###### # # # # # ## # # # " ) print( " # # # # # # # # # # # ## ## # # " ) print( " ###### # # # # # ## # ###### # # # # " ) print( "--------------------------------------------------------------------" ) # Step 1 ocsClient = OCSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret')) namespaceId = config.get('Configurations', 'Namespace') print(namespaceId) print(ocsClient.uri) # Step 2 if needData: createData(ocsClient) sampleStreamId = "SampleStream" ####################################################################### # Dataviews ####################################################################### # We need to create the dataview. # For our dataview we are going to combine the two streams that were # created, using a search to find the streams, # using common part of their name. # We are using the default mappings. # This means our columns will keep their original names. # Another typical use of columns is to change what stream properties # get mapped to which column. # Mappings allow you to rename a column in the results to something # different. So if we want to we could rename Pressure to press. # We then define the IndexDataType. Currently only # datetime is supported. # Next we need to define IndexConfig. It holds the default # startIndex and endIndex to define a time period, mode (interpolated), # and interpolation interval. # Our results when looking at it like a table looks like: # # time,pressure,temperature # 2019-06-27T12:23:00Z,36.3668286389033,60.614978497887 # 2019-06-27T12:24:00Z,36.3668286389033,60.614978497887 # 2019-06-27T12:25:00Z,36.3668286389033,60.614978497887 # 2019-06-27T12:26:00Z,40.5653155047711,59.4181700259214 # 2019-06-27T12:27:00Z,54.5602717243303,55.4288084527031 # ... 
# Step 3 queryObj = DataviewQuery(sampleDataviewId, f"name:*{sampleStreamId}*") mappingObj = DataviewMapping() if startTime: indexConfigObj = DataviewIndexConfig( startIndex=startTime.isoformat(timespec='minutes'), endIndex=(startTime + datetime.timedelta(minutes=40)).isoformat( timespec='minutes'), mode="Interpolated", interval="00:01:00") else: indexConfigObj = None dataview = Dataview(id=sampleDataviewId, queries=queryObj, indexDataType="datetime", mappings=mappingObj, name=sampleDataviewName, indexConfig=indexConfigObj, description=sampleDataviewDescription) print print("Creating dataview") print(dataview.toJson()) dataviews = ocsClient.Dataviews.postDataview(namespaceId, dataview, defaultMappings=True) # Step 4 print print("Getting dataview") dv = ocsClient.Dataviews.getDataview(namespaceId, sampleDataviewId) # assert is added to make sure we get back what we are expecting expectedJSON = '{"Id": "Dataview_Sample", "Queries": [{"Id": "Dataview_Sample", "Query": "name:*SampleStream*"}], "Name": "Dataview_Sample_Name", "Description": "A Sample Description that describes that this Dataview is just used for our sample.", "Mappings": {"Columns": [{"Name": "time", "IsKey": true, "DataType": "DateTime", "MappingRule": {"PropertyPaths": ["time"]}}, {"Name": "pressure", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["pressure"]}}, {"Name": "temperature", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["temperature"]}}]}, "IndexConfig": ' + indexConfigObj.toJson( withSeconds=True ) + ', "IndexDataType": "DateTime", "GroupRules": []}' assert dv.toJson().lower() == expectedJSON.lower( ), 'Dataview is different: ' + dv.toJson() # Also check that the new dataview shows up in the list of all dataviews dv_all = ocsClient.Dataviews.getDataviews(namespaceId) match_dv = [dv.Id for dv in dv_all if dv.Id == sampleDataviewId] assert len( match_dv ) == 1, "Did not find created Dataview in list of all of them" dv.Description = sampleDataviewDescription_modified dv.Mappings.IsDefault = False # for now we have to change this to post # Step 5 print print("Updating dataview") # No dataview returned, success is 204 ocsClient.Dataviews.putDataview(namespaceId, dv) # Step 6 # Getting the complete set of dataviews to make sure it is there print print("Getting dataviews") dataviews = ocsClient.Dataviews.getDataviews(namespaceId) for dataview1 in dataviews: if hasattr(dataview1, "Id"): print(dataview1.toJson()) # Getting the datagroups of the defined dataview. # The datgroup lets you see what is returned by the Dataview Query. print print("Getting Datagroups") # Step 7 # This works for the automated test. You can use this or the below. datagroups = ocsClient.Dataviews.getDatagroups(namespaceId, sampleDataviewId, 0, 100, True) print('datagroups') print(datagroups) # By default the preview get interpolated values every minute over the # last hour, which lines up with our data that we sent in. # Beyond the normal API options, this function does have the option # to return the data in a class if you have created a Type for the # data you are retrieving. 
# Step 8 print print("Retrieving data preview from the Dataview") dataviewDataPreview1 = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId) print(str(dataviewDataPreview1[0])) # Step 9 print() print("Getting data as a table, seperated by commas, with headers") # Get the first 20 rows, keep token for next 20 rows dataviewDataTable1, token = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId, form="csvh", count=20) # Display received 20 lines showing: # * First lines with extrapolation (first value replicated of each stream) # * Interpolated values at 1 minute interval, stream recorded at 2 minutes interval print(dataviewDataTable1) # Get the last 20 rows using token, then display (without row header) dataviewDataTable2, token = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId, form="csv", count=21, continuationToken=token) print(dataviewDataTable2, "\n\n") assert token is None, f"Continuation token is not None: got {token}" # Now override startIndex/endIndex/interval of previous Data View # Ask for last 5 minutes of data, aligned on the seconds, interpolated at 30 seconds startIndex = (startTime + datetime.timedelta(minutes=55)).isoformat( timespec='seconds') endIndex = (startTime + datetime.timedelta(minutes=60)).isoformat( timespec='seconds') dataviewDataTable3, token2 = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId, form="csvh", count=11, continuationToken=None, startIndex=startIndex, endIndex=endIndex, interval="00:00:30") print(dataviewDataTable3) assert token2 is None, f"Continuation token is not None: got {token2}" except Exception as ex: print((f"Encountered Error: {ex}")) print traceback.print_exc() print success = False exception = ex finally: ####################################################################### # Dataview deletion ####################################################################### print print print("Deleting dataview") # Step 10 supressError(lambda: ocsClient.Dataviews.deleteDataview( namespaceId, sampleDataviewId)) # check, including assert is added to make sure we deleted it dv = None try: dv = ocsClient.Dataviews.getDataview(namespaceId, sampleDataviewId) except Exception as ex: # Exception is expected here since dataview has been deleted dv = None finally: assert dv is None, 'Delete failed' print("Verification OK: dataview effectively deleted") if needData: print("Deleting added Streams") supressError(lambda: ocsClient.Streams.deleteStream( namespaceId, samplePressureStreamId)) supressError(lambda: ocsClient.Streams.deleteStream( namespaceId, sampleTemperatureStreamId)) print("Deleting added Types") supressError(lambda: ocsClient.Types.deleteType( namespaceId, samplePressureTypeId)) supressError(lambda: ocsClient.Types.deleteType( namespaceId, sampleTemperatureTypeId)) if test and not success: raise exception
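# The two-call pattern above generalizes to a paging loop: keep passing the
# returned continuation token back in until it comes back as None. A sketch,
# assuming getDataInterpolated keeps returning (rows, token) tuples as it does
# above and accepts continuationToken=None on the first call; it is meant to
# run in the same context as the calls above (ocsClient, namespaceId, etc.).
token = None
while True:
    rows, token = ocsClient.Dataviews.getDataInterpolated(
        namespaceId, sampleDataviewId, form="csvh", count=20,
        continuationToken=token)
    print(rows)
    if token is None:
        break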
Resource = https://dat-b.osisoft.com
Tenant = 65292b6c-ec16-414a-b583-ce7ae04046d4
ApiVersion = v1-preview

[Credentials]
ClientId = 82fca0c2-3004-42c0-81cf-cc6968df1f47
ClientSecret = 3VYy318vxlFGKVuQ4+toahSyg7IqWUVKiGHJDvH/IvY=
"""

config = configparser.ConfigParser(allow_no_value=True)
config.read_file(io.StringIO(config_text))

ocs_client = OCSClient(
    config.get("Access", "ApiVersion"),
    config.get("Access", "Tenant"),
    config.get("Access", "Resource"),
    config.get("Credentials", "ClientId"),
    config.get("Credentials", "ClientSecret"),
)

namespace_id = config.get("Configurations", "Namespace")
# print(f"namespace_id: '{namespace_id}'")

all_buildings = ['ARC Pavilion', 'Academic Surge Building',
                 'Activities and Recreation Center',
                 'Advanced Materials Research Laboratory',
                 'Advanced Transportation Infrastructure Research Center',
                 'Aggie Stadium', 'Agronomy Field Laboratory',
                 'Animal Building', 'Animal Resource Service J1',
                 'Animal Resource Service M3', 'Animal Resource Service N1',
                 'Ann E. Pitzer Center', 'Antique Mechanics Trailer',
                 'Aquatic Biology & Environmental Science Bldg',
                 'Art Building', 'Art Building Annex',
                 'Art, Music, Wright Halls', 'Asmundson Annex',
                 'Asmundson Hall', 'Bainer Hall', 'Bowley Head House',
                 'Briggs Hall', 'California Hall', 'Campus Data Center',
                 'Cellular Biology Laboratory',
                 'Center for Companion Animal Health',
                 'Center for Comparative Medicine',
def main(test=False): global namespaceId, firstData success = True exception = {} try: print("--------------------------------------------------------------------") print(" ###### ###### # # ") print(" # # ## ##### ## # # # ###### # # # # # # ") print(" # # # # # # # # # # # # # # # # # ") print(" # # # # # # # # # # ##### # # ###### # ") print(" # # ###### # ###### # # # # # ## # # # ") print(" # # # # # # # # # # # ## ## # # ") print(" ###### # # # # # ## # ###### # # # # ") print("--------------------------------------------------------------------") ocsClient = OCSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret')) namespaceId = config.get('Configurations', 'Namespace') print(namespaceId) print(ocsClient.uri) if needData: createData(ocsClient) sampleStreamId = "SampleStream" ####################################################################### # Dataviews ####################################################################### # We need to create the dataview. # For our dataview we are going to combine the two streams that were # created, using a search to find the streams, # using common part of their name. # We are using the default mappings. # This means our columns will keep their original names. # Another typical use of columns is to change what stream properties # get mapped to which column. # Mappings allow you to rename a column in the results to something # different. So if we want to we could rename Pressure to press. # We then define the IndexDataType. Currently only # datetime is supported. # Next we need to define the grouping rules. # Grouping decides how each row in the result is filled in. # In this case we are grouping by tag, which effectively squashes are # results together so that way Pressure and Temperature and Time all # get results in a row. # If we grouped by StreamName, each row would be filled is as fully # as it can by each Stream name. Giving us results with # Pressure and Time seperate from Pressure and Temperature # Our results when looking at it like a table looks like: # time,DefaultGroupRule_Tags,pressure,temperature # 2019-02-18T18:50:17.1084594Z,(NoTags),13.8038967965309,57.6749982613741 # 2019-02-18T18:51:17.1084594Z,(NoTags),13.8038967965309,57.674998261374 # .... 
queryObj = DataviewQuery(sampleDataviewId, 'streams', 'name', sampleStreamId, 'Contains') groupRuleObj = DataviewGroupRule("DefaultGroupRule", "StreamTag") mappingObj = DataviewMapping(isDefault=True) dataview = Dataview(id=sampleDataviewId, queries=[queryObj], indexDataType="datetime", groupRules=[groupRuleObj], mappings=mappingObj, name=sampleDataviewName, description=sampleDataviewDescription) print print("Creating dataview") print(dataview.toJson()) dataviews = ocsClient.Dataviews.postDataview(namespaceId, dataview) print print("Getting dataview") dv = ocsClient.Dataviews.getDataview(namespaceId, sampleDataviewId) # assert is added to make sure we get back what we are expecting expectedJSON = '{"Id": "Dataview_Sample", "Queries": [{"Id": "Dataview_Sample", "Query": {"Resource": "Streams", "Field": "Name", "Value": "SampleStream", "Function": "Contains"}}], "Name": "Dataview_Sample_Name", "Description": "A Sample Description that describes that this Dataview is just used for our sample.", "Mappings": {"IsDefault": true, "Columns": [{"Name": "time", "IsKey": true, "DataType": "DateTime", "MappingRule": {"PropertyPaths": ["time"]}}, {"Name": "DefaultGroupRule_Tags", "IsKey": false, "DataType": "string", "MappingRule": {"GroupRuleId": "DefaultGroupRule", "GroupRuleToken": "Tags"}}, {"Name": "pressure", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["pressure"]}}, {"Name": "temperature", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["temperature"]}}]}, "IndexDataType": "datetime", "GroupRules": [{"Id": "DefaultGroupRule", "Type": "StreamTag", "TokenRules": null}]}' assert dv.toJson().lower() == expectedJSON.lower(), 'Dataview is different: ' + dv.toJson() dv.Description = sampleDataviewDescription_modified dv.Mappings.IsDefault = False # for now we have to change this to post print print("Updating dataview") dv = ocsClient.Dataviews.putDataview(namespaceId, dv) expectedJSON = '{"Id": "Dataview_Sample", "Queries": [{"Id": "Dataview_Sample", "Query": {"Resource": "Streams", "Field": "Name", "Value": "SampleStream", "Function": "Contains"}}], "Name": "Dataview_Sample_Name", "Description": "A longer sample description that describes that this Dataview is just used for our sample and this part shows a put.", "Mappings": {"IsDefault": true, "Columns": [{"Name": "time", "IsKey": true, "DataType": "DateTime", "MappingRule": {"PropertyPaths": ["time"]}}, {"Name": "DefaultGroupRule_Tags", "IsKey": false, "DataType": "string", "MappingRule": {"GroupRuleId": "DefaultGroupRule", "GroupRuleToken": "Tags"}}, {"Name": "pressure", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["pressure"]}}, {"Name": "temperature", "IsKey": false, "DataType": "Double", "MappingRule": {"PropertyPaths": ["temperature"]}}]}, "IndexDataType": "datetime", "GroupRules": [{"Id": "DefaultGroupRule", "Type": "StreamTag", "TokenRules": null}]}' assert dv.toJson().lower() == expectedJSON.lower(), 'Dataview is different ' + dv.toJson() # Getting the complete set of dataviews to make sure it is there print print("Getting dataviews") dataviews = ocsClient.Dataviews.getDataviews(namespaceId) for dataview1 in dataviews: if hasattr(dataview1, "Id"): print(dataview1.toJson()) # Getting the datagroups of the defined dataview. # The datgroup lets you see what is returned by the Dataview Query. print print("Getting Datagroups") # This works for the automated test. You can use this or the below. 
datagroups = ocsClient.Dataviews.getDatagroups( namespaceId, sampleDataviewId, 0, 100, True) print('datagroups') print(datagroups) # This works locally, but fails during automated tests for some reason # datagroups = ocsClient.Dataviews.getDatagroups(namespaceId, sampleDataviewId) # for key, datagroup in datagroups['DataGroups'].items(): # print('datagroup') # print(datagroup.toJson()) # By default the preview get interpolated values every minute over the # last hour, which lines up with our data that we sent in. # Beyond the normal API optoins, this function does have the option # to return the data in a class if you have created a Type for the # data you are retreiving. print print("Retrieving data preview from the Dataview") dataviewDataPreview1 = ocsClient.Dataviews.getDataviewPreview( namespaceId, sampleDataviewId) print(str(dataviewDataPreview1[0])) # Now we can get the data creating a session. # The session allows us to get pages of data ensuring that the u # nderlying data won't change as we collect the pages. # There are apis to manage the sessions, but that is beyond the scope # of this basic example. # To highlight the use of the sessions this we will access the data, # add a stream that would be added to result. # It won't show up because of the session, # but we will see it in the preview that doesn't use the session. print print("Retrieving data from the Dataview using session") dataviewDataSession1 = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId) print(str(dataviewDataSession1[0])) print("Intentional waiting for 5 seconds to show a noticeable change" " in time.") # We wait for 5 seconds so the preview is different that before, but # our session data should be the same time.sleep(5) dataviewDataPreview2 = ocsClient.Dataviews.getDataviewPreview( namespaceId, sampleDataviewId) print(str(dataviewDataPreview2[0])) dataviewDataSession2 = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId) print(str(dataviewDataSession2[0])) assert (dataviewDataSession2[0] == dataviewDataSession1[0]), "Returned values from Dataview Data Sessions is different" print() print("Getting data as a table, seperated by commas, with headers") # Viewing the whole returned result as a table dataviewDataSession3 = ocsClient.Dataviews.getDataInterpolated( namespaceId, sampleDataviewId, form="csvh") # I only want to print out the headers and 2 rows, # otherwise it monpolizes the printed screen. 
        print(dataviewDataSession3[:193])

    except Exception as ex:
        print(f"Encountered Error: {ex}")
        print()
        traceback.print_exc()
        print()
        success = False
        exception = ex

    finally:
        #######################################################################
        # Dataview deletion
        #######################################################################
        print()
        print()
        print("Deleting dataview")
        supressError(lambda: ocsClient.Dataviews.deleteDataview(
            namespaceId, sampleDataviewId))

        # Verify the delete; the assert makes sure the dataview is really gone
        dv = None
        try:
            dv = ocsClient.Dataviews.getDataview(namespaceId, sampleDataviewId)
        except Exception as ex:
            print(f"Encountered Error: {ex}")
            dv = None
        finally:
            assert dv is None, 'Delete failed'

        if needData:
            print("Deleting added Streams")
            supressError(lambda: ocsClient.Streams.deleteStream(
                namespaceId, samplePressureStreamId))
            supressError(lambda: ocsClient.Streams.deleteStream(
                namespaceId, sampleTemperatureStreamId))
            print("Deleting added Types")
            supressError(lambda: ocsClient.Types.deleteType(
                namespaceId, samplePressureTypeId))
            supressError(lambda: ocsClient.Types.deleteType(
                namespaceId, sampleTemperatureTypeId))

        if test and not success:
            raise exception
def main(test=False): global namespaceId success = True exception = {} try: print("--------------------------------------------------------------------") print(" ###### # # ###### # # ") print(" # # ## ##### ## # # # ###### # # # # # # ") print(" # # # # # # # # # # # # # # # # # ") print(" # # # # # # # # # # ##### # # ###### # ") print(" # # ###### # ###### # # # # # ## # # # ") print(" # # # # # # # # # # # ## ## # # ") print(" ###### # # # # # ## # ###### # # # # ") print("--------------------------------------------------------------------") # Step 1 print() print("Step 1: Authenticate against OCS") ocsClient = OCSClient(config.get('Access', 'ApiVersion'), config.get('Access', 'Tenant'), config.get('Access', 'Resource'), config.get('Credentials', 'ClientId'), config.get('Credentials', 'ClientSecret')) namespaceId = config.get('Configurations', 'Namespace') print(namespaceId) print(ocsClient.uri) # Step 2 print() print ("Step 2: Create types, streams, and data") if needData: createData(ocsClient) # Step 3 print() print("Step 3: Create a data view") dataView = DataView(id=sampleDataViewId,name=sampleDataViewName,description=sampleDataViewDescription) dataViews = ocsClient.DataViews.postDataView(namespaceId, dataView) # Step 4 print() print("Step 4: Retrieve the data view") dv = ocsClient.DataViews.getDataView(namespaceId, sampleDataViewId) print(dv.toJson()) # Step 5 print() print("Step 5: Add a query for data items") query = Query(id=queryID, value=queryString) dv.Queries.append(query) # No Data View returned, success is 204 ocsClient.DataViews.putDataView(namespaceId, dv) # Step 6 print() print("Step 6: View items found by the query") print("List data items found by the query:") dataItems = ocsClient.DataViews.getResolvedDataItems( namespaceId, sampleDataViewId, queryID) print(dataItems.toJson()) print("List ineligible data items found by the query:") dataItems = ocsClient.DataViews.getResolvedIneligibleDataItems( namespaceId, sampleDataViewId, queryID) print(dataItems.toJson()) # Step 7 print() print("Step 7: View fields available to include in the data view") availablefields = ocsClient.DataViews.getResolvedAvailableFieldSets( namespaceId, sampleDataViewId) print(availablefields.toJson()) # Step 8 print() print("Step 8: Include some of the available fields") dv.DataFieldSets = availablefields.Items ocsClient.DataViews.putDataView(namespaceId, dv) print("List available field sets:") availablefields = ocsClient.DataViews.getResolvedAvailableFieldSets( namespaceId, sampleDataViewId) print(availablefields.toJson()) print("Retrieving data from the data view:") dataViewDataPreview1 = ocsClient.DataViews.getDataInterpolated( namespace_id=namespaceId, dataView_id=sampleDataViewId, startIndex=startTime, endIndex=endTime, interval=interval) print(str(dataViewDataPreview1)) print(len(dataViewDataPreview1)) assert len(dataViewDataPreview1) > 0, "Error getting back data" # Step 9 print() print("Step 9: Group the data view") grouping = Field(source=fieldSourceForGrouping, label="{DistinguisherValue} {FirstKey}") dv.GroupingFields.append(grouping) # No DataView returned, success is 204 ocsClient.DataViews.putDataView(namespaceId, dv) print("Retrieving data from the data view:") dataViewDataPreview1 = ocsClient.DataViews.getDataInterpolated( namespace_id=namespaceId, dataView_id=sampleDataViewId, startIndex=startTime, endIndex=endTime, interval=interval) print(str(dataViewDataPreview1)) assert len(dataViewDataPreview1) > 0, "Error getting back data" # Step 10 print() print("Step 10: Identify data 
items") identify = dv.GroupingFields.pop() dvDataItemFieldSet = find_FieldSet(dv.DataFieldSets, queryID) dvDataItemFieldSet.IdentifyingField = identify # No Data View returned, success is 204 ocsClient.DataViews.putDataView(namespaceId, dv) print("Retrieving data from the data view:") dataViewDataPreview1 = ocsClient.DataViews.getDataInterpolated( namespace_id=namespaceId, dataView_id=sampleDataViewId, startIndex=startTime, endIndex=endTime, interval=interval) print(str(dataViewDataPreview1)) assert len(dataViewDataPreview1) > 0, "Error getting back data" # Step 11 print() print("Step 11: Consolidate data fields") field1 = find_Field_Key(dvDataItemFieldSet.DataFields, FieldSource.PropertyId, fieldToConsolidateTo) field2 = find_Field_Key(dvDataItemFieldSet.DataFields, FieldSource.PropertyId, fieldToConsolidate) print(field1.toJson()) print(field2.toJson()) field1.Keys.append(fieldToConsolidate) dvDataItemFieldSet.DataFields.remove(field2) # No Data View returned, success is 204 ocsClient.DataViews.putDataView(namespaceId, dv) print("Retrieving data from the data view:") dataViewDataPreview1 = ocsClient.DataViews.getDataInterpolated( namespace_id=namespaceId, dataView_id=sampleDataViewId, startIndex=startTime, endIndex=endTime, interval=interval) print(str(dataViewDataPreview1)) assert len(dataViewDataPreview1) > 0, "Error getting back data" except Exception as ex: print((f"Encountered Error: {ex}")) print traceback.print_exc() print success = False exception = ex finally: ####################################################################### # Data View deletion ####################################################################### # Step 12 print() print("Step 12: Delete sample objects from OCS") print("Deleting data view...") suppressError(lambda: ocsClient.DataViews.deleteDataView( namespaceId, sampleDataViewId)) # check, including assert is added to make sure we deleted it dv = None try: dv = ocsClient.DataViews.getDataView(namespaceId, sampleDataViewId) except Exception as ex: # Exception is expected here since Data View has been deleted dv = None finally: assert dv is None, 'Delete failed' print("Verification OK: Data View deleted") if needData: print("Deleting sample streams...") suppressError(lambda: ocsClient.Streams.deleteStream( namespaceId, sampleStreamId)) suppressError(lambda: ocsClient.Streams.deleteStream( namespaceId, sampleStreamId2)) print("Deleting sample types...") suppressError(lambda: ocsClient.Types.deleteType( namespaceId, sampleTypeId)) suppressError(lambda: ocsClient.Types.deleteType( namespaceId, samplePressureId2)) if test and not success: raise exception