Example #1
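# Note: these examples assume module-level imports (logging, xdrlib,
# numpy as np, pandas as pd), a client module aliased as `sc`, and
# SENSOR_NAME / CHANNEL_NAME constants defined elsewhere in the project.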
def processAutarcoFile(file):
    logger = logging.getLogger('solaroad.autarco')
    x = pd.read_csv(file, delimiter=';', skip_blank_lines=True)
    index_time = pd.to_datetime(x['time'], format="%Y-%m-%d %H:%M:%S")
    x.index = index_time
    # Drop any unnamed filler columns (deleting while iterating x.keys() is unsafe).
    x = x.drop(columns=[key for key in x.keys() if 'Unnamed' in key])
    x = x.dropna()

    for device in x['device'].unique():
        logger.debug('======================== Now processing %s ========================', device)
        # drop() returns a copy, avoiding chained-assignment warnings from del on a slice
        y = x.loc[x['device'] == device].drop(columns=['device', 'time'])

        ctr = 0
        total_steps = int(np.ceil(len(y) / sc.MAX_POINTS))  # ceil, not round + 1: avoids a trailing empty chunk
        while ctr < total_steps:
            server, auth_token = sc.authenticate()
            deviceId = sc.getDeviceId()

            sp = ctr * sc.MAX_POINTS
            tmp = y.iloc[sp:sp + sc.MAX_POINTS, :]  # end index is exclusive; the original -1 skipped one row per chunk
            logger.debug('--------------------- RECORD %s/%s of %s ------------------------------', ctr + 1,
                         total_steps, device)

            if ctr == 0:
                sc.addSensor(server, auth_token, deviceId, device, SENSOR_NAME, device, device)

            logger.debug('Now uploading %s', device)

            for parameter in tmp['parameter'].unique():
                param_vals = tmp.loc[tmp['parameter'] == parameter].drop(columns=['parameter'])
                param_vals = param_vals.resample('300S').mean().interpolate(method='linear')

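                # XDR payload layout: version, time unit, interval, point count,
                # then (timestamp, value) pairs.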
                packer = xdrlib.Packer()
                packer.pack_int(1)  # version 1

                packer.pack_enum(sc.SECONDS)
                packer.pack_int(300)

                POINTS = len(param_vals)
                packer.pack_int(POINTS)

                channel = '_'.join(parameter.replace('(', '').replace(')', '').split(' '))
                if ctr == 0:
                    sc.addChannel(server, auth_token, deviceId, device, channel, channel, channel)

                for ts, val in param_vals['value'].items():  # iteritems() was removed in pandas 2.0
                    # Nanoseconds since epoch; naive timestamps are taken as local time.
                    timestamp = ts.to_pydatetime().timestamp() * 1000000000
                    packer.pack_hyper(int(timestamp))
                    packer.pack_float(float(val))

                data = packer.get_buffer()
                sc.uploadData(server, auth_token, deviceId, device, channel, data)
            ctr = ctr + 1
Example #2
def processLeGrandFile(file):
    logger = logging.getLogger('solaroad.legrand')

    x = pd.read_csv(file, delimiter=';', skip_blank_lines=True)
    x.columns = ['Date', 'kWh']
    index_time = pd.to_datetime(x['Date'], format='%m/%d/%Y %I:%M:%S %p')

    x.index = index_time
    x = x.drop(columns=[key for key in x.keys() if 'Unnamed' in key])
    x = x.dropna()
    x = x.drop(columns=['Date'])
    x['kWh'] = pd.to_numeric(x['kWh'])

    # Authenticate
    server, auth_token = sc.authenticate()
    deviceId = sc.getDeviceId()

    # Add Sensor
    logger.debug('======================== Now processing Legrand ========================')
    sc.addSensor(server, auth_token, deviceId, SENSOR_NAME, SENSOR_NAME, SENSOR_NAME, SENSOR_NAME)

    # Pre-processing
    y = x.resample('12H').mean().interpolate(method='linear')

    # Break the DataFrame into chunks of sc.MAX_POINTS rows
    ctr = 0
    total_steps = int(np.ceil(len(y) / sc.MAX_POINTS))  # len(y), not len(x): y is the resampled frame being chunked
    while ctr < total_steps:
        sp = ctr * sc.MAX_POINTS
        tmp = y.iloc[sp:sp + sc.MAX_POINTS, :]  # exclusive end index
        logger.debug('--------------------- RECORD %s/%s ------------------------------', ctr + 1, total_steps)
        for key in tmp.keys():
            packer = xdrlib.Packer()
            packer.pack_int(1)  # version 1

            packer.pack_enum(sc.SECONDS)
            packer.pack_int(43200)

            POINTS = len(tmp[key])
            packer.pack_int(POINTS)

            logger.debug('Now uploading %s', key)

            if ctr == 0:
                sc.addChannel(server, auth_token, deviceId, SENSOR_NAME, CHANNEL_NAME, CHANNEL_NAME, CHANNEL_NAME)

            for ts, val in tmp[key].items():
                timestamp = ts.to_pydatetime().timestamp() * 1000000000
                packer.pack_hyper(int(timestamp))
                packer.pack_float(float(val))

            data = packer.get_buffer()
            sc.uploadData(server, auth_token, deviceId, SENSOR_NAME, CHANNEL_NAME, data)
        ctr = ctr + 1
Example #3
def processDataFrame(y, sensor, sensorId):
    logger = logging.getLogger('solaroad.' + sensor)

    # Pre-processing
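    # Keep rows within two standard deviations of the mean and drop zero readings.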
    x = y[np.abs(y['Energy (Wh)'] -
                 y['Energy (Wh)'].mean()) <= (2 * y['Energy (Wh)'].std())]
    x = x[(x['Energy (Wh)'] != 0)]
    x = x.dropna()
    x = x.resample('10S').mean().interpolate(method='linear')

    # Break DataFrame into chunks of 100k
    ctr = 0
    total_steps = int(np.ceil(len(x) / sc.MAX_POINTS))
    while ctr < total_steps:
        # Authenticate
        server, auth_token = sc.authenticate()
        deviceId = sc.getDeviceId()

        sp = ctr * sc.MAX_POINTS
        tmp = x.iloc[sp:sp + sc.MAX_POINTS, :]  # exclusive end index
        logger.debug(
            '--------------------- RECORD %s/%s of %s ------------------------------',
            ctr + 1, total_steps, sensorId)
        for key in tmp.keys():
            channel = '_'.join(
                key.replace('(', '').replace(')', '').split(' '))
            packer = xdrlib.Packer()
            packer.pack_int(1)  # version 1

            packer.pack_enum(sc.SECONDS)
            packer.pack_int(10)

            POINTS = len(tmp[key])
            packer.pack_int(POINTS)

            logger.debug('Now uploading %s', key)

            if ctr == 0:
                sc.addChannel(server, auth_token, deviceId, sensorId, channel,
                              channel, channel)

            for ts, val in tmp[key].items():
                timestamp = ts.to_pydatetime().timestamp() * 1000000000
                packer.pack_hyper(int(timestamp))
                packer.pack_float(float(val))

            data = packer.get_buffer()
            sc.uploadData(server, auth_token, deviceId, sensorId, channel,
                          data)
        logger.debug(
            '--------------------- FINISHED RECORD %s/%s of %s ------------------------------',
            ctr + 1, total_steps, sensorId)
        ctr = ctr + 1
Example #4
def processAPSFile(file):
    logger = logging.getLogger('solaroad.aps')
    x = pd.read_csv(file, delimiter=';', skip_blank_lines=True)
    index_time = pd.to_datetime(x['Date'], format="%Y-%m-%d %H:%M:%S")
    x.index = index_time
    x = x.drop(columns=[key for key in x.keys() if 'Unnamed' in key])
    x = x.dropna()

    for inverterId in x['Inverter ID'].unique():
        logger.debug('======================== Now processing %s ========================', inverterId)
        y = x.loc[x['Inverter ID'] == inverterId].drop(columns=['Inverter ID', 'Date'])

        server, auth_token = sc.authenticate()
        deviceId = sc.getDeviceId()

        ctr = 0
        total_steps = int(np.ceil(len(y) / sc.MAX_POINTS))
        while ctr < total_steps:
            sp = ctr * sc.MAX_POINTS
            tmp = y.iloc[sp:sp + sc.MAX_POINTS, :]
            logger.debug('--------------------- RECORD %s/%s ------------------------------', ctr + 1, total_steps)

            if ctr == 0:
                sc.addSensor(server, auth_token, deviceId, inverterId, inverterId, inverterId, inverterId)

            logger.debug('Now uploading %s', inverterId)

            for key in tmp.keys():
                channel = '_'.join(key.replace('(', '').replace(')', '').split(' '))
                if ctr == 0:
                    sc.addChannel(server, auth_token, deviceId, inverterId, channel, channel, channel)

                # One packer per channel: each upload must carry only this channel's
                # points, not the accumulated buffer of the keys packed before it.
                packer = xdrlib.Packer()
                packer.pack_int(1)  # version 1

                packer.pack_enum(sc.SECONDS)
                packer.pack_int(300)

                POINTS = len(tmp[key])
                packer.pack_int(POINTS)

                for ts, val in tmp[key].items():
                    timestamp = ts.to_pydatetime().timestamp() * 1000000000
                    packer.pack_hyper(int(timestamp))
                    packer.pack_float(float(val))

                data = packer.get_buffer()
                sc.uploadData(server, auth_token, deviceId, inverterId, channel, data)
            ctr = ctr + 1
Example #5
def processAgilentFile(file):
    logger = logging.getLogger('solaroad.agilent')
    numSkipRows = 0
    num_lines = sum(1 for line in open(file, encoding="utf-16")) - 1
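    # Scan forward to the header row, which starts with "Scan,"; give up if
    # the file contains no data rows at all.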
    with open(file, encoding="utf-16") as csvFile:
        line = csvFile.readline()
        while line[:5] != "Scan,":
            if numSkipRows >= num_lines:
                logger.debug('There was no data to read from %s! Skipping!',
                             file)
                return
            numSkipRows += 1
            line = csvFile.readline()
    x = pd.read_csv(file,
                    delimiter=',',
                    skip_blank_lines=True,
                    skiprows=numSkipRows,
                    encoding="utf-16")
    index_time = pd.to_datetime(x['Time'], format="%d-%m-%Y %H:%M:%S:%f")
    x.index = index_time
    x = x.drop(columns=[key for key in x.keys() if 'Unnamed' in key])
    x = x.dropna()
    x = x.drop(columns=['Time', 'Scan'])

    server, auth_token = sc.authenticate()
    deviceId = sc.getDeviceId()

    # Add Sensor
    logger.debug(
        '======================== Now processing Agilent ========================'
    )
    sc.addSensor(server, auth_token, deviceId, SENSOR_NAME, SENSOR_NAME,
                 SENSOR_NAME, SENSOR_NAME)

    # Pre-processing
    y = x.resample('120S').mean().interpolate(method='linear')

    # Break DataFrame into chunks of 100k
    ctr = 0
    total_steps = int(np.ceil(len(y) / sc.MAX_POINTS))  # len(y): y is the resampled frame being chunked
    while ctr < total_steps:
        sp = ctr * sc.MAX_POINTS
        tmp = y.iloc[sp:sp + sc.MAX_POINTS, :]  # exclusive end index
        logger.debug(
            '--------------------- RECORD %s/%s ------------------------------',
            ctr + 1, total_steps)
        for key in tmp.keys():
            channel = '_'.join(key.replace('(', '').replace(')', '')
                               .replace('<', '').replace('>', '').split(' '))
            packer = xdrlib.Packer()
            packer.pack_int(1)  # version 1

            packer.pack_enum(sc.SECONDS)
            packer.pack_int(120)

            POINTS = len(tmp[key])
            packer.pack_int(POINTS)

            logger.debug('Now uploading %s', key)

            if ctr == 0:
                sc.addChannel(server, auth_token, deviceId, SENSOR_NAME,
                              channel, channel, channel)

            for ts, val in tmp[key].items():
                timestamp = ts.to_pydatetime().timestamp() * 1000000000
                packer.pack_hyper(int(timestamp))
                packer.pack_float(float(val))

            data = packer.get_buffer()
            sc.uploadData(server, auth_token, deviceId, SENSOR_NAME, channel,
                          data)
        ctr = ctr + 1
Example #6
def processFlirBicycleFile(file):
    logger = logging.getLogger('solaroad.flirbicycle')
    x = pd.read_csv(file,
                    delimiter=',',
                    skip_blank_lines=True,
                    skipinitialspace=True)
    index_time = pd.to_datetime(x['Time'], format="%d/%m/%Y %H:%M:%S")
    x.index = index_time
    x = x.drop(columns=[key for key in x.keys() if 'Unnamed' in key])
    x = x.drop(columns=['Time'])
    x = x.dropna()

    for zone in x['Zone'].unique():
        # Authenticate
        server, auth_token = sc.authenticate()
        deviceId = sc.getDeviceId()

        # Add Sensor
        logger.debug(
            '======================== Now processing Zone %s ========================',
            zone)
        sc.addSensor(server, auth_token, deviceId, str(zone), 'Flir-Bicycle',
                     str(zone), str(zone))

        # Pre-processing: restrict to this zone before resampling; otherwise
        # every zone would upload the same average over all zones.
        y = x.loc[x['Zone'] == zone].drop(columns=['Zone'])
        y = y.resample('3600S').mean().interpolate(method='linear')

        # Break DataFrame into chunks of 100k
        ctr = 0
        total_steps = int(np.ceil(len(y) / sc.MAX_POINTS))
        while ctr < total_steps:
            sp = ctr * sc.MAX_POINTS
            tmp = y.iloc[sp:sp + sc.MAX_POINTS, :]  # exclusive end index
            logger.debug(
                '--------------------- RECORD %s/%s of %s ------------------------------',
                ctr + 1, total_steps, zone)
            for key in tmp.keys():
                channel = '_'.join(key.replace('(', '').replace(')', '')
                                   .replace('#', '').split(' '))
                packer = xdrlib.Packer()
                packer.pack_int(1)  # version 1

                packer.pack_enum(sc.SECONDS)
                packer.pack_int(3600)

                POINTS = len(tmp[key])
                packer.pack_int(POINTS)

                logger.debug('Now uploading %s', key)

                if ctr == 0:
                    sc.addChannel(server, auth_token, deviceId, str(zone),
                                  channel, channel, channel)

                for ts, val in tmp[key].items():
                    timestamp = ts.to_pydatetime().timestamp() * 1000000000
                    packer.pack_hyper(int(timestamp))
                    packer.pack_float(float(val))

                data = packer.get_buffer()
                sc.uploadData(server, auth_token, deviceId, str(zone), channel,
                              data)
            ctr = ctr + 1
Example #7
def processGillFile(file):

    logger = logging.getLogger('solaroad.gill')
    x = pd.read_csv(file,
                    delimiter=',',
                    skiprows=1,
                    header=None,
                    skip_blank_lines=True)
    x.columns = [
        'SlNo', 'Reporting Time', 'Node', 'Pressure', 'Relative Humidity',
        'Temperature', 'Dew Point', 'Solar Radiation', 'Measured Time',
        'Voltage', 'Status', 'EndChar'
    ]
    x.index = pd.to_datetime(x['Measured Time'], format="%Y-%m-%dT%H:%M:%S.%f")
    x.index.name = 'Timestamp'
    x = x.drop(columns=['SlNo', 'Node', 'Reporting Time', 'Measured Time',
                        'Voltage', 'Status', 'EndChar'])
    x = x.dropna()

    sensorName = 'GillMaximetGMX301'
    logger.debug(
        '======================== Now processing %s ========================',
        sensorName)

    server, auth_token = sc.authenticate()
    deviceId = sc.getDeviceId()

    x = x.resample('5min').mean().interpolate(method='linear')

    ctr = 0
    total_steps = int(np.ceil(len(x) / sc.MAX_POINTS))
    while ctr < total_steps:
        sp = ctr * sc.MAX_POINTS
        tmp = x.iloc[sp:sp + sc.MAX_POINTS, :]  # exclusive end index
        logger.debug(
            '--------------------- RECORD %s/%s ------------------------------',
            ctr + 1, total_steps)

        if ctr == 0:
            sc.addSensor(server, auth_token, deviceId, sensorName, sensorName,
                         sensorName, sensorName)

        logger.debug('Now uploading %s', sensorName)

        for key in tmp.keys():
            packer = xdrlib.Packer()
            packer.pack_int(1)  # version 1

            packer.pack_enum(sc.SECONDS)
            packer.pack_int(300)

            POINTS = len(tmp[key])
            packer.pack_int(POINTS)

            channel = '_'.join(
                key.replace('(', '').replace(')', '').split(' '))
            if ctr == 0:
                sc.addChannel(server, auth_token, deviceId, sensorName, channel,
                              channel, channel)

            logger.debug('Now uploading %s', channel)

            for ts, val in tmp[key].items():
                timestamp = ts.to_pydatetime().timestamp() * 1000000000
                packer.pack_hyper(int(timestamp))
                packer.pack_float(float(val))

            data = packer.get_buffer()
            sc.uploadData(server, auth_token, deviceId, sensorName, channel,
                          data)
        ctr = ctr + 1
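
All seven examples pack data with the same XDR sequence: a version int, a
time-unit enum, the sampling interval, the point count, then one (nanosecond
timestamp, float value) pair per point. Below is a minimal sketch of that
sequence as a shared helper, assuming a pandas Series input like the ones
above (packSeries is a hypothetical name, not part of the original code;
note that xdrlib itself is deprecated since Python 3.11):

import xdrlib

def packSeries(series, interval_seconds, time_unit_enum):
    # Pack a float Series indexed by pandas Timestamps into the XDR layout
    # used by every example above: version, time unit, interval, point count,
    # then (nanosecond timestamp, value) pairs.
    packer = xdrlib.Packer()
    packer.pack_int(1)                # payload version 1
    packer.pack_enum(time_unit_enum)  # e.g. sc.SECONDS
    packer.pack_int(interval_seconds)
    packer.pack_int(len(series))
    for ts, val in series.items():
        # Nanoseconds since epoch; naive timestamps are taken as local time.
        packer.pack_hyper(int(ts.to_pydatetime().timestamp() * 1000000000))
        packer.pack_float(float(val))
    return packer.get_buffer()

With this helper, each per-channel loop above would reduce to building the
value Series and calling sc.uploadData(server, auth_token, deviceId, sensor,
channel, packSeries(values, interval, sc.SECONDS)).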