def parse_resultset(variables, data_point, result_set, last_date=None):
    """Turn a raw result set into [time, value] pairs suitable for a step plot.

    :param variables: list of column names; must contain 'time' and data_point.
    :param data_point: name of the column to extract alongside 'time'.
    :param result_set: iterable of rows (lists) ordered like `variables`.
    :param last_date: optional local datetime marking the end of the series;
        when None, the current UTC time is used (and no timezone conversion
        is performed).
    :return: for string-valued data, the raw [time, value] pairs unchanged;
        for numeric data, a chronologically sorted list in which each point
        is duplicated just before the next point's timestamp, producing a
        step-plot shape. Returns [] when there is no usable data.
    """
    if not result_set:
        return []
    time_idx = variables.index('time')
    data_idx = variables.index(data_point)
    # Python 2 has a distinct `unicode` type; Python 3 folds it into `str`.
    # The original referenced `unicode` unconditionally, which raises
    # NameError on Python 3 -- probe for it instead.
    try:
        text_types = [str, unicode]
    except NameError:
        text_types = [str]
    # String-valued series cannot be step-interpolated; return them as-is.
    if type(result_set[0][data_idx]) in text_types:
        return [[row[time_idx], row[data_idx]] for row in result_set]
    try:
        # `+ 0.0` coerces numeric values to float and rejects non-numerics.
        points = [[row[time_idx], row[data_idx] + 0.0]
                  for row in result_set if row[data_idx] is not None]
    except TypeError:
        return []
    if not points:
        return []
    #sort based on time
    points.sort(key=lambda p: p[0])
    #interleave redundant data to make it step-plot
    if last_date is None:
        last_date = datetime.datetime.utcnow()
    else:
        last_date = date_converter.localToUTC(last_date)
    last_time = int(
        (last_date - datetime.datetime(1970, 1, 1)).total_seconds() * 1000)
    old = numpy.array(points)
    # Repeat each value at (next point's time - 1 ms) so the interleaved
    # pairs stay in chronological order and render as horizontal steps.
    new_time = numpy.append(old[1:, 0], last_time) - 1.0
    new_list = numpy.vstack((new_time, old[:, 1])).transpose().tolist()
    old = old.tolist()
    return list(itertools.chain(*zip(old, new_list)))
def timedelta(value, arg=None):
    """Return a human-friendly relative-time string for *value*.

    :param value: a datetime (UTC-aware or naive local time); falsy values
        yield ''.
    :param arg: optional datetime to compare against; defaults to the
        current UTC time.
    :return: "in <delta>" for future times, "<delta> ago" for past times.
    """
    if not value:
        return ''
    # Normalize `value` to a naive UTC datetime before comparing.
    if value.tzinfo != pytz.UTC:
        value = date_converter.localToUTC(value)
    else:
        value = value.replace(tzinfo=None)  #no tzone is assumed UTC
    reference = arg if arg else datetime.utcnow()
    if value > reference:
        return "in %s" % timesince(reference, value)
    return "%s ago" % timesince(value, reference)
def insert(agentID, all_vars, log_vars, cur_timeLocal=None, tablename=None):
    """Insert one row of agent data into the Cassandra time-series store.

    :param agentID: string. Data will be inserted into the table named
        B<agent_id> unless `tablename` is given.
    :param all_vars: dictionary (usually APIobject.variables) of all
        variables and their values.
    :param log_vars: dictionary (usually APIobject.log_variables) of the
        variables to be logged and their datatypes.
    :param cur_timeLocal: optional local timestamp for the row; when None,
        the current UTC time is used directly.
    :param tablename: optional explicit table name; defaults to "B"+agentID.
    :return: whatever customInsert returns (0 on success).

    The timestamp stored in Cassandra is always UTC. If the table for the
    agent does not exist, customInsert creates it. **If an error occurs
    because variable names/datatypes changed, the old table is deleted and
    recreated with the current schema — development-only behavior to avoid
    in the final version.**
    """
    # Copy both dicts so the caller's originals are not mutated by the
    # default-column additions below.
    all_vars = dict(all_vars)
    log_vars = dict(log_vars)
    global connection_established
    if not connection_established:
        makeConnection()
    if cur_timeLocal is None:
        cur_timeUTC = datetime.datetime.utcnow()
    else:
        cur_timeUTC = date_converter.localToUTC(cur_timeLocal)
    date_only = str(cur_timeUTC.date())
    if tablename is None:
        tablename = "B" + agentID
    #Add default columns to log_vars
    log_vars['agent_id'] = 'text'
    log_vars['date_id'] = 'text'
    log_vars['time'] = 'TIMESTAMP'
    #assign values to default columns
    all_vars['agent_id'] = agentID
    all_vars['date_id'] = date_only
    all_vars['time'] = cur_timeUTC
    return customInsert(all_vars=all_vars,
                        log_vars=log_vars,
                        tablename=tablename)
def EventRegister(self, dbcon, event, reason=None, source=None,
                  event_time=None, notify=True):
    """Record an event in the Cassandra events table and, optionally, the
    relational notification table.

    :param dbcon: DB-API style cursor/connection wrapper used for the
        notification tables (supports execute/fetchone/commit).
    :param event: event name; must exist in possible_events.event_name.
    :param reason: optional reason string; defaults to 'Unknown'.
    :param source: agent id that generated the event; defaults to this
        agent's id.
    :param event_time: optional local time of the event (converted to UTC);
        defaults to the current UTC time.
    :param notify: when True, also insert a row into the notification table.
    """
    evt_vars = dict()
    event_time = date_converter.localToUTC(
        event_time) if event_time else datetime.now(pytz.UTC)
    source = source if source else self.agent_id
    reason = reason if reason else 'Unknown'
    logged_by = self.agent_id
    evt_vars['date_id'] = str(event_time.date())
    evt_vars['logged_time'] = datetime.now(pytz.UTC)
    evt_vars['event_id'] = uuid.uuid4()
    evt_vars['time'] = event_time
    evt_vars['source'] = source
    evt_vars['event'] = event
    evt_vars['reason'] = reason
    evt_vars['logged_by'] = logged_by
    evt_vars['node_name'] = self.node_name
    #save to cassandra
    self.TSDCustomInsert(all_vars=evt_vars,
                         log_vars=EVENTS_TABLE_VARS,
                         tablename=EVENTS_TABLE_NAME)
    if notify:
        #save to notification table
        localTime = date_converter.UTCToLocal(event_time)
        message = source + ' ' + event + '. Reason: ' + reason
        dbcon.execute("select id from possible_events where event_name=%s",
                      (event, ))
        event_id = dbcon.fetchone()[0]
        # BUG FIX: parameters must be a sequence; passing the bare string
        # `(source)` made DB-API drivers treat each character as a separate
        # parameter. Use a one-element tuple like the query above.
        dbcon.execute(
            "select building_id from device_info where agent_id=%s",
            (source, ))
        building_id = dbcon.fetchone()[0]
        dbcon.execute(
            "insert into notification (dt_triggered, seen, event_type_id, message, building_id) VALUES (%s, %s, %s, %s, %s)",
            (localTime, False, event_id, message, building_id))
        dbcon.commit()
def retrieve(agentID,
             vars=None,
             startTime=None,
             endTime=None,
             export=False,
             tablename=None,
             weather_agent=None):
    """Retrieve data from the active Cassandra cluster.

    :param agentID: must supply, since each agentID is associated with a
        table in the database.
    :param vars: list of column-name strings to retrieve, e.g.
        ['time', 'temperature', 'heat_setpoint']. If not supplied, all
        columns of the table are queried.
    :param startTime: range start in the node's local timezone
        (datetime.datetime). Defaults to 24 hours before the current UTC time.
    :param endTime: range end in the node's local timezone
        (datetime.datetime). Defaults to the current UTC time.
    :param export: when True, the 'time' column is formatted as local-time
        strings for export; otherwise 'time' becomes epoch milliseconds and
        status/motion columns are mapped to 1/0 for charting.
    :param tablename: optional explicit table name; defaults to
        ("B" + agentID).lower().
    :param weather_agent: optional agent id of a weather table; its data is
        interpolated onto this agent's timestamps and appended as
        'weather_*' columns.
    :return: (vars, result) where result is a 2-D numpy array (rows are
        table entries, columns correspond to the queried variables), or an
        empty list when no data matched.
        NOTE(review): on InvalidRequest this function prints a message and
        falls off the end, implicitly returning None (not -1 as previously
        documented) -- confirm which contract callers expect.
    """
    global connection_established
    if not connection_established:
        makeConnection()
    # Offset between the UTC and local clocks, rounded to whole seconds.
    # NOTE(review): sec_offset/timeDel are computed but never used below.
    x = datetime.datetime.utcnow() - datetime.datetime.now()
    sec_offset = int(round(x.seconds + x.microseconds / 1000000.0))
    timeDel = datetime.timedelta(seconds=sec_offset)
    if startTime == None:
        startTimeUTC = datetime.datetime.utcnow() - datetime.timedelta(
            hours=24)
    else:
        startTimeUTC = date_converter.localToUTC(startTime)  #convert to UTC
    if endTime == None:
        endTimeUTC = datetime.datetime.utcnow()
    else:
        endTimeUTC = date_converter.localToUTC(endTime)  #convert to UTC
    if tablename == None:
        tablename = str("B" + agentID).lower()
    if vars is None:
        # No explicit column list: read the table's schema to get all columns.
        try:
            result = bSpace.execute(
                "select column_name from system_schema.columns WHERE keyspace_name=%s and table_name=%s",
                [keyspace_name, tablename])
        except:
            connection_established = False  #Try to establish connection again next time
            raise
        vars = [var[0] for var in result]
    weather_vars = [
        'time', 'temperature', 'humidity', 'pressure', 'v_wind',
        'sky_condition'
    ]
    varStr = ', '.join(vars)  #to get rid of the last ', '
    weatherStr = ', '.join(weather_vars)
    daterange = pandas.date_range(
        startTimeUTC - datetime.timedelta(days=1),
        endTimeUTC + datetime.timedelta(days=1)
    )  #pad with extra days, just in case date_id was stored as local date
    total_result = []
    total_weather_result = []
    try:
        # Query one partition (agent_id + date_id) per day in the padded range.
        for day in daterange:
            date_local = str(day.date())
            result = bSpace.execute(
                'select {0} from {1} WHERE agent_id=%s AND date_id=%s AND time >= %s AND time <= %s'
                .format(varStr, tablename),
                [agentID, date_local, startTimeUTC, endTimeUTC])
            if result:
                result = numpy.array(list(result))
                # NOTE(review): once total_result is an ndarray, `== []`
                # relies on numpy's shape-mismatch comparison evaluating
                # falsy; deprecated in newer numpy -- verify.
                if total_result == []:
                    total_result = result
                else:
                    total_result = numpy.vstack((total_result, result))
            if weather_agent:
                weather_result = bSpace.execute(
                    'select {0} from {1} WHERE agent_id=%s AND date_id=%s AND time >= %s AND time <= %s'
                    .format(weatherStr, "B" + weather_agent.lower()),
                    [weather_agent, date_local, startTimeUTC, endTimeUTC])
                if weather_result:
                    weather_result = numpy.array(list(weather_result))
                    # NOTE(review): backup_result is never read afterwards.
                    backup_result = numpy.array(weather_result)
                    # Replace the datetime column with unix seconds so it can
                    # be searched/interpolated numerically below.
                    weather_result[:, 0] = [
                        time.mktime(x.timetuple())
                        for x in weather_result[:, 0]
                    ]
                    if total_weather_result == []:
                        total_weather_result = weather_result
                    else:
                        total_weather_result = numpy.vstack(
                            (total_weather_result, weather_result))
        if weather_agent:
            #Interpolate the Weather data to get values at data-points
            intp_weather = []
            for row in total_result:
                unix_time = time.mktime(row[vars.index('time')].timetuple())
                intp_weather.append([])
                for wvars in weather_vars[1:]:
                    try:
                        if len(total_weather_result) == 0:
                            raise ValueError(
                                'No weather data found for this day')
                        # Nearest-neighbor index into the (time-sorted)
                        # weather timestamps.
                        indx = numpy.searchsorted(
                            list(total_weather_result[:, 0]), unix_time)
                        if indx >= len(list(total_weather_result[:, 0])):
                            indx -= 1
                        if abs(
                                total_weather_result[indx, 0] - unix_time
                        ) > 2 * 60 * 60:  #if no weather data within 2-hour in either side, put None
                            raise ValueError('No weather data near this time')
                        try:
                            res = [
                                float(
                                    numpy.interp(
                                        unix_time,
                                        list(total_weather_result[:, 0]),
                                        list(total_weather_result[:,
                                                                  weather_vars.
                                                                  index(wvars)]
                                             )))
                            ]
                            if numpy.isnan(res[0]):
                                raise ValueError('Cannot interpolate')
                            intp_weather[-1] += res
                        except (ValueError, TypeError
                                ):  #string types can't be interpolated
                            # Fall back to the nearest raw value.
                            intp_weather[-1] += [
                                total_weather_result[indx,
                                                     weather_vars.index(wvars)]
                            ]
                    except ValueError as Er:
                        #print Er
                        intp_weather[-1] += [None]
            vars += ['weather_' + x for x in weather_vars[1:]
                     ]  # append weather vars (except time) to the vars
            if len(total_result) and len(intp_weather):
                intp_weather = numpy.array(intp_weather, dtype='O')
                total_result = numpy.concatenate((total_result, intp_weather),
                                                 axis=1)
        # If there is no data, return an empty list []
        if len(total_result) == 0:
            return vars, total_result
        if vars is not None:
            #convert the UTC time to local time if time is present
            #more robust method would be look at data types of the result and convert them if it has datetime data type
            # NOTE(review): the map(...) assignments below assume Python 2,
            # where map returns a list; under Python 3 a map object would be
            # broadcast incorrectly into the numpy column.
            if 'cooling_mode' in vars:
                total_result[:, vars.index('cooling_mode')] = map(
                    lambda x: x.encode('utf8'),
                    total_result[:, vars.index('cooling_mode')])
            if export:
                if 'time' in vars:
                    time_array = total_result[:, vars.index('time')]
                    time_array = [
                        date_converter.UTCToLocal(t) for t in time_array
                    ]  # convert to local time for exporting
                    total_result[:, vars.index('time')] = map(
                        lambda x: "{}".format(
                            x.strftime('%y-%m-%d %H:%M:%S')), time_array)
            else:
                if 'time' in vars:
                    time_array = total_result[:, vars.index('time')]
                    total_result[:, vars.index('time')] = map(
                        lambda x: int(
                            (x - datetime.datetime(1970, 1, 1)).total_seconds(
                            ) * 1000), time_array)  #epoch times for chart
                if 'status' in vars:
                    # Map textual on/off to 1/0 for charting; leave other
                    # values untouched.
                    total_result[:, vars.index('status')] = map(
                        lambda x: 1 if str(x).lower() == 'on' else 0
                        if str(x).lower() == 'off' else x,
                        total_result[:, vars.index('status')])
                if 'motion' in vars:
                    total_result[:, vars.index('motion')] = map(
                        lambda x: 1 if x == True else 0 if x == False else x,
                        total_result[:, vars.index('motion')])
        return vars, total_result
    except InvalidRequest as e:
        # NOTE(review): e.message is Python 2-only; Python 3 would need str(e).
        if e.message.find('unconfigured columnfamily') != -1:
            total_result = -1
            print('table not exist')
        else:
            total_result = -1
            print e
    except:
        connection_established = False  #Try to establish connection again next time
        raise