def publish_to_historian(self, to_publish_list):
    _log.debug("publish_to_historian number of items: {}".format(
        len(to_publish_list)))
    start_time = get_utc_seconds_from_epoch()
    if self._client is None:
        success = self._establish_client_connection()
        if not success:
            return

    # Initialize cursor so the finally block is safe even if cursor
    # creation itself raises.
    cursor = None
    try:
        cursor = self._client.cursor()

        batch_data = []

        for row in to_publish_list:
            ts = utils.format_timestamp(row['timestamp'])
            source = row['source']
            topic = row['topic']
            value = row['value']
            meta = row['meta']

            topic_lower = topic.lower()

            # Handle the serialization of data here because we can't pass
            # an array as a string so we create a string from the value.
            if isinstance(value, (list, dict)):
                value = dumps(value)

            if topic_lower not in self._topic_meta:
                try:
                    cursor.execute(insert_topic_query(self._schema,
                                                      self._topic_table),
                                   (topic, meta))
                except ProgrammingError as ex:
                    # A duplicate key means the topic already exists in the
                    # database; just cache its metadata locally.
                    if ex.args[0].startswith(
                            'SQLActionException[DuplicateKeyException'):
                        self._topic_meta[topic_lower] = meta
                    else:
                        _log.error(repr(ex))
                        _log.error(
                            "Unknown error during topic insert {} {}".format(
                                type(ex), ex.args
                            ))
                else:
                    self._topic_meta[topic_lower] = meta
            else:
                # Check whether the metadata has changed for this topic.
                old_meta = self._topic_meta.get(topic_lower)
                if not old_meta:
                    old_meta = {}
                if set(old_meta.items()) != set(meta.items()):
                    _log.debug(
                        'Updating meta for topic: {} {}'.format(topic, meta))
                    self._topic_meta[topic_lower] = meta
                    cursor.execute(update_topic_query(self._schema,
                                                      self._topic_table),
                                   (meta, topic))

            batch_data.append(
                (ts, topic, source, value, meta)
            )

        try:
            query = insert_data_query(self._schema, self._data_table)
            # _log.debug("Inserting batch data: {}".format(batch_data))
            results = cursor.executemany(query, batch_data)

            # One result dict is returned per parameter set; anything other
            # than a single inserted row is treated as a failure.
            index = 0
            failures = []
            for r in results:
                if r['rowcount'] != 1:
                    failures.append(index)
                index += 1

            if failures:
                for findex in failures:
                    data = batch_data[findex]
                    _log.error("Failed to insert data {}".format(data))
                    self.report_handled(to_publish_list[findex])

        except ProgrammingError as ex:
            # The batch insert failed as a whole; fall back to inserting the
            # rows one at a time so that only the bad rows are lost.
            _log.error(
                "Invalid data detected during batch insert: {}".format(
                    ex.args))
            _log.debug("Attempting singleton insert.")
            insert = insert_data_query(self._schema, self._data_table)
            for id in range(len(batch_data)):
                try:
                    batch = batch_data[id]
                    cursor.execute(insert, batch)
                except ProgrammingError:
                    _log.debug('Invalid data not saved {}'.format(
                        batch
                    ))
                except Exception as ex:
                    _log.error(repr(ex))
                else:
                    self.report_handled(to_publish_list[id])

        except Exception as ex:
            _log.error(
                "Exception Type: {} ARGS: {}".format(type(ex), ex.args))

        else:
            self.report_all_handled()
    except TypeError as ex:
        _log.error(repr(ex))
        _log.error(
            "AFTER EXCEPTION: {} ARGS: {}".format(type(ex), ex.args))
    except Exception as ex:
        _log.error(repr(ex))
        _log.error(
            "Unknown Exception {} {}".format(type(ex), ex.args)
        )
    finally:
        if cursor is not None:
            cursor.close()
            cursor = None
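# The query helpers called above (insert_topic_query, update_topic_query,
# insert_data_query) are assumed to return parameterized CrateDB statements
# whose placeholder order matches the tuples bound above. A minimal sketch
# with illustrative table layouts; the real column names may differ:


def insert_topic_query(schema, topic_table):
    # Assumed layout: one row per topic, storing its metadata beside the name.
    return ("INSERT INTO {schema}.{table} (name, meta) "
            "VALUES (?, ?)".format(schema=schema, table=topic_table))


def update_topic_query(schema, topic_table):
    # Assumed layout: replace the stored metadata for an existing topic.
    return ("UPDATE {schema}.{table} SET meta = ? "
            "WHERE name = ?".format(schema=schema, table=topic_table))


def insert_data_query(schema, data_table):
    # Assumed layout: one row per observation (ts, topic, source, value, meta).
    return ("INSERT INTO {schema}.{table} (ts, topic, source, value, meta) "
            "VALUES (?, ?, ?, ?, ?)".format(schema=schema, table=data_table))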
def publish_to_historian(self, to_publish_list):
    # _log.debug("publish_to_historian number of items: {}".format(
    #     len(to_publish_list)))
    start_time = get_utc_seconds_from_epoch()
    if self._client is None:
        success = self._establish_client_connection()
        if not success:
            return

    # Initialize cursor so the finally block is safe even if cursor
    # creation itself raises.
    cursor = None
    try:
        cursor = self._client.cursor()

        batch_data = []

        for row in to_publish_list:
            ts = utils.format_timestamp(row['timestamp'])
            source = row['source']
            topic = self.get_renamed_topic(row['topic'])
            value = row['value']
            meta = row['meta']

            # Handle the serialization of data here because we can't pass
            # an array as a string so we create a string from the value.
            if isinstance(value, (list, dict)):
                value = dumps(value)

            if topic not in self._topic_set:
                try:
                    cursor.execute(insert_topic_query(self._schema),
                                   (topic,))
                except ProgrammingError as ex:
                    # A duplicate key means the topic already exists in the
                    # database; just cache it locally.
                    if ex.args[0].startswith(
                            'SQLActionException[DuplicateKeyException'):
                        self._topic_set.add(topic)
                    else:
                        _log.error(repr(ex))
                        _log.error(
                            "Unknown error during topic insert {} {}".format(
                                type(ex), ex.args
                            ))
                else:
                    self._topic_set.add(topic)

            batch_data.append(
                (ts, topic, source, value, meta)
            )

        try:
            query = insert_data_query(self._schema)
            # _log.debug("Inserting batch data: {}".format(batch_data))
            results = cursor.executemany(query, batch_data)

            # One result dict is returned per parameter set; a negative
            # rowcount marks that parameter set as failed.
            index = 0
            failures = []
            for r in results:
                if r['rowcount'] == -1:
                    failures.append(index)
                index += 1

            if failures:
                for findex in failures:
                    data = batch_data[findex]
                    _log.error("Failed to insert data {}".format(data))
                    self.report_handled(to_publish_list[findex])

        except ProgrammingError as ex:
            # The batch insert failed as a whole; fall back to inserting the
            # rows one at a time so that only the bad rows are dropped.
            _log.error(
                "Invalid data detected during batch insert: {}".format(
                    ex.args))
            _log.debug("Attempting singleton insert.")
            insert = insert_data_query(self._schema)
            for id in range(len(batch_data)):
                try:
                    batch = batch_data[id]
                    cursor.execute(insert, batch)
                except ProgrammingError:
                    # Report invalid data as handled so it is not retried.
                    _log.debug('Invalid data not saved {}'.format(
                        to_publish_list[id]
                    ))
                    self.report_handled(to_publish_list[id])
                except Exception as ex:
                    _log.error(repr(ex))
                else:
                    self.report_handled(to_publish_list[id])

        except Exception as ex:
            _log.error(
                "Exception Type: {} ARGS: {}".format(type(ex), ex.args))

        else:
            self.report_all_handled()
    except TypeError as ex:
        _log.error(repr(ex))
        _log.error(
            "AFTER EXCEPTION: {} ARGS: {}".format(type(ex), ex.args))
    except Exception as ex:
        _log.error(repr(ex))
        _log.error(
            "Unknown Exception {} {}".format(type(ex), ex.args)
        )
    finally:
        if cursor is not None:
            cursor.close()
            cursor = None
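# Illustrative call showing the record shape publish_to_historian expects.
# The field names come from the loop above; the timestamp, topic, value, and
# metadata are made-up sample data, and "historian" is assumed to be a
# connected instance of this class.
import datetime

import pytz

sample_to_publish_list = [
    {
        'timestamp': datetime.datetime(2017, 3, 1, 12, 0, 0, tzinfo=pytz.utc),
        'source': 'scrape',
        'topic': 'devices/campus/building/device/OutsideAirTemperature',
        'value': 72.5,
        'meta': {'units': 'F', 'type': 'float'},
    },
]
# historian.publish_to_historian(sample_to_publish_list)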