import sys
import time

from pycampbellcr1000 import CR1000


def dado_campbell():  # Connects to the CR1000 and collects the data
    try:
        print('Connecting to Campbell CR1000... ', end='')
        device = CR1000.from_url('serial:/dev/ttyUSB0:38400')
        print('OK')

    except Exception:
        # wait 20 minutes (1200 s) before retrying
        print(
            'ERROR\n---------------------------- 20 min ---------------------------'
        )
        time.sleep(1200)
        print('Connecting to Campbell CR1000', end='')
        time.sleep(3)
        print('.', end='')
        time.sleep(3)
        print('.', end='')
        time.sleep(3)
        print('. ', end='')
        try:
            device = CR1000.from_url('serial:/dev/ttyUSB0:38400')
            print('OK')
        except Exception:
            print(
                'ERROR\n---------------------------- EXIT ----------------------------'
            )
            sys.exit()

    return device
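A minimal usage sketch; the 'Public' table name here is an assumption borrowed from the later examples on this page:

device = dado_campbell()
records = device.get_data('Public')  # 'Public' is an assumed table name
print(records[0])
device.bye()  # close the link cleanly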
Example #2
    def UpdateNow(self):
        addr = "tcp:ip:port"
        print("UNFINISHED: TODO")
        return  # NOTE: everything below is unreachable until the TODO above is resolved

        try:
            start = SensorData.objects.latest('timestamp').timestamp
        except ObjectDoesNotExist:
            print("No previous data records found.")
            print("Downloading all logger data...")
            device = CR1000.from_url(addr)
            dataList = device.get_data('Ten_Min')
        else:
            print("Beginning download of all data recorded since " +
                  str(start))
            device = CR1000.from_url(addr)
            dataList = device.get_data('Ten_Min', start)

        toSaveList = []
        for record in dataList:
            ts = record['TIMESTAMP']
            del record['TIMESTAMP']
            # dict.items() yields (key, value) pairs, so the column name
            # comes first and the reading second
            for col, val in record.items():
                try:
                    dat = SensorData(sensor=None,
                                     timestamp=ts,
                                     val_type=None,
                                     val=float(val))
                    dat.full_clean()
                    toSaveList.append(dat)
                except Exception:
                    continue

        for dat in toSaveList:
            dat.save()
Example #3
    def __init__(self, client, sensordict, confdict):
        self.client = client
        self.sensordict = sensordict
        self.confdict = confdict
        # variables for broadcasting via mqtt:
        self.count = 0
        self.datalst = []
        self.datacnt = 0  # record counter used when stacking data for sending (see sendRequest)
        self.metacnt = 10
        ###
        port = confdict['serialport'] + sensordict.get('port')
        baudrate = sensordict.get('baudrate')
        # the URL should look like this: device = CR1000.from_url('serial:/dev/ttyUSB3:38400')
        #try:
        log.msg('connecting to device...')
        self.device = CR1000.from_url('serial:{}:{}'.format(port, baudrate))
        #except:
            # TODO: no CR1000 present... "Cannot access device"
            #log.msg('Cannot access device')
        tables = self.device.list_tables()
        if not tables == ['Status', 'SamplesEvery2s', 'ValuesEveryMinute', 'Public']:
            # TODO: log command, abort!
            log.msg('CR1000 not configured for Judd JC')
        else:
            self.device.settime(datetime.utcnow())
            # TODO: the following output is for development only!!
            log.msg('++++++++ information from the config files +++++++++++++')
            log.msg('++ client:')
            log.msg(client)
            log.msg('++ sensordict:')
            log.msg(sensordict)
            log.msg('++ confdict:')
            log.msg(confdict)
Example #4
def connection():
    # Generator intended for use as a context manager (e.g. wrapped with
    # contextlib.contextmanager): yields a connected device, then cleans up.
    device = CR1000.from_url(CRX_URL)

    try:
        if device.ping_node():
            yield device
        else:
            raise Exception("CRX did not respond to ping")
    finally:
        # close the PakBus link even if the caller's block raised
        device.bye()
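A minimal usage sketch, assuming connection() is meant to be wrapped with contextlib.contextmanager (the decorator is not shown above):

from contextlib import contextmanager

managed_connection = contextmanager(connection)  # wrap the generator as a context manager

with managed_connection() as device:
    print(device.list_tables())  # device.bye() runs on exit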
Example #5
def get_data(url, start_date, end_date, dir_path):
    """
    Extract data from the Campbell data logger for each specified table and save to a daily csv file over the specified date range.
    Default tables are: Housekeeping, GPS_datetime, SoilTemperature, SoilMoisture, SoilHeatFlux and Radiation
    
    :param url: (str) URL for connection with logger in format 'tcp:iphost:port' or 'serial:/dev/ttyUSB0:19200:8N1'
    :param start_date: (datetime.datetime) The start date from which to collect data
    :param end_date: (datetime.datetime) The end date after which to stop collecting data. (the end date will be included in the data.) 
    :param dir_path: (str) The path to the top level directory in which to create the csv files and folders.
    :returns: None
    """
    device = CR1000.from_url(url)

    # device.list_tables():
    # ['Status', 'Housekeeping', 'GPS_datetime', 'SoilTemperature', 'SoilMoisture', 'SoilHeatFlux', 'Radiation', 'DataTableInfo', 'Public']
    tables = CONFIG['common']['logger_tables']

    while start_date <= end_date:

        for table in tables:

            end_of_day = start_date + timedelta(hours=23, minutes=59, seconds=59, microseconds=999999)  # last instant of the same day

            csv_dirs = os.path.join(dir_path, table)
            csv_name = f"{table}_{start_date.strftime('%Y-%m-%d')}.csv"
            csv_path = os.path.join(csv_dirs, csv_name)

            # create csv path
            if not os.path.exists(csv_dirs):
                os.makedirs(csv_dirs)

            # if file doesn't exist - make it
            if not os.path.isfile(csv_path):
                open(csv_path, 'w').close()

            # open the csv file
            try:
                df = pd.read_csv(csv_path)
            except pd.errors.EmptyDataError:
                get_data_from_range(device, table, csv_path, start_date, end_of_day, header=True)
                continue

            if df.empty:
                get_data_from_range(device, table, csv_path, start_date, end_of_day, header=False)

            else:
                # if the file already has data in it, update the existing data from the latest entry onwards
                # get the latest date before updating - add a microsecond so the last record is not duplicated
                latest = datetime.strptime(df['Datetime'].iloc[-1], "%Y-%m-%d %H:%M:%S") + timedelta(microseconds=1)
                get_data_from_range(device, table, csv_path, latest, end_of_day, header=False)

            print(f"Completed for {table} for {start_date.strftime('%Y-%m-%d')}")
            time.sleep(3)

        start_date = start_date + timedelta(1)
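A minimal usage sketch; the URL, date range, and output directory below are all placeholders:

from datetime import datetime

get_data('serial:/dev/ttyUSB0:19200:8N1',  # placeholder connection URL
         datetime(2021, 1, 1),             # start_date
         datetime(2021, 1, 7),             # end_date (inclusive)
         '/data/campbell')                 # placeholder top-level directory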
Example #7
    def finddata(self):
        modem_on(1)
        cr1000_on(1)
        sleep(90)
        device = CR1000.from_url('tcp:192.168.0.30:6785')
        data = device.get_data('Public')
        # print(data[0])

        # Output labels for the most recent record; 'Timestamp' corresponds
        # to the record key 'Datetime', every other label matches its record
        # key directly, and labels and values share a single ordering.
        labels = [
            'Timestamp', 'RecNbr', 'Batt_volt', 'Ptemp_C', 'R6', 'R10',
            'R20', 'R40', 'R2_5', 'R4_5', 'R6_5', 'R8_5', 'T6', 'T10',
            'T20', 'T40', 'T2_5', 'T4_5', 'T6_5', 'T8_5', 'DT', 'Q', 'TCDT'
        ]
        keys = ['Datetime'] + labels[1:]
        values = [str(data[0][key]) for key in keys]
        return labels, values
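The two parallel lists pair naturally into a dict; 'station' below is a hypothetical instance of the class that defines finddata():

# 'station' is a hypothetical instance of the surrounding class
labels, values = station.finddata()
reading = dict(zip(labels, values))
print(reading['Batt_volt'])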
Example #8
def crpull(ip, port, tstart, tend):
    print('Connecting to device...')
    device = CR1000.from_url('tcp:' + str(ip) + ':' + str(port), timeout=10)
    if len(tstart) > 0:
        tstart = datetime.strptime(tstart, '%Y-%m-%d %H:%M:%S')
        tend = datetime.strptime(tend, '%Y-%m-%d %H:%M:%S')
        print('Pulling data from the Dat table after ' + str(tstart) + '...')
        rawdata = device.get_data('Dat', tstart, tend)
    else:
        print('Pulling all data from the Dat table...')
        rawdata = device.get_data('Dat')
    device.bye()  # close the link in both branches
    if len(rawdata) > 0:
        orderedcol = rawdata.filter(('Datetime', 'RecNbr', 'Year', 'jDay', 'HH', 'MM', 'SS',
                                     'batt_volt_Min', 'PTemp_Avg', 'Room_T_Avg', 'IRGA_T_Avg',
                                     'IRGA_P_Avg', 'MF_Controller_mLmin_Avg', 'PressureVolt_Avg',
                                     'RH_voltage_Avg', 'Gas_T_Avg', 'rawCO2_Voltage_Avg',
                                     'rawCO2_Avg', 'rawH2O_Avg', 'ID', 'Program'))
        newdata = orderedcol.to_csv(header=False).split('\r\n')
        return newdata
    else:
        return ''
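A minimal usage sketch; the host, port, and timestamps are placeholders:

rows = crpull('192.168.0.30', 6785,  # placeholder host and port
              '2021-01-01 00:00:00',
              '2021-01-02 00:00:00')
for row in rows:
    print(row)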
Example #9
    def cr_read(self):
        data = None   # returned unchanged if the read fails
        is_on = True  # assume powered until checked, so 'finally' is safe
        try:
            modem_on(1)
            is_on = is_on_checker(0, 5)
            if not is_on:
                # Turn on CR1000x
                cr1000_on(1)
                sleep(15)
        except Exception:
            print("Problem with port or problem with power to the CR1000")
            traceback.print_exc(
                file=open("/media/mmcblk0p1/logs/system.log", "a+"))
        else:
            # Read data
            device = CR1000.from_url('tcp:192.168.0.30:6785')
            data = device.get_data('Public')
        finally:
            if not is_on:
                # Turn off CR1000
                cr1000_off(1)
                modem_off(1)
        return data
Example #10
NTU2_15_MedColm = 'TurbNTU_Med'
NTU3_15_MedColm = 'TurbNTU2_Med'

NTU2_24_MedColm = 'TurbNTU_Med'
NTU3_24_MedColm = 'TurbNTU2_Med'

# Initialize field variables
NTU2_15_Med = 0
NTU3_15_Med = 0
NTU2_24_Med = 0
NTU3_24_Med = 0
# Holds the table that contains the data we're plotting
dataTable = 'TableEachScan'
# The device we're connecting to.
device = CR1000.from_url('serial:/' + location + ":" + port)
# Get all tables from device
tables = device.list_tables()
# Get Streaming Tokens from plot.ly
stream_ids = tls.get_credentials_file()['stream_ids']
# Grab first Token in list
stream_id = stream_ids[0]
# Used to check if header should be used
has_ran = False
# File descriptor for log file
log_file = os.open("logfile.txt", os.O_RDWR | os.O_APPEND | os.O_CREAT)

# Set up traces for plot
dailyTurbidMed = dict(
    x=[],
    y=[],
Example #11
        def sendRequest(self):
            # TODO: where should the debug flag live?
            debug = False
            if self.reconnect.is_set():
                log.msg('exiting, mutex locked!')
                return
            t = datetime.utcnow()
            past = t - timedelta(seconds=3)
            vals = self.device.get_data('SamplesEvery2s', past, t)
            # vals[0] because we grab no older data; there is only one value every 2 seconds
            # timestamp goes directly from datetime into the array
            # TODO: ask Roman whether/how to compare with the computer time
            try:
                darray = datetime2array(vals[0]['Datetime'])
                # TODO: "again" is a stopgap
                again = False
            except Exception:
                again = True
            try:
                if again:
                    t = datetime.utcnow()
                    past = t - timedelta(seconds=3)
                    # re-fetch before retrying the conversion
                    vals = self.device.get_data('SamplesEvery2s', past, t)
                    darray = datetime2array(vals[0]['Datetime'])
                    log.msg("IT TOOK A SECOND TIME TO GET DATA PROPERLY!")
            except Exception:
                # there will be no log messages when the logger is turned off
                return
                # TODO: the reconnect logic below stays disabled (unreachable
                # behind the return above) until it works cleanly!
                log.msg('NO DATA FROM CR1000 !!! - vals:')
                log.msg(vals)
                port = self.confdict['serialport'] + self.sensordict.get(
                    'port')
                baudrate = self.sensordict.get('baudrate')
                self.reconnect.set()
                connected = False
                while not connected:
                    self.device.bye()
                    log.msg('reconnecting to device...')
                    time.sleep(5)
                    try:
                        self.device = CR1000.from_url('serial:{}:{}'.format(
                            port, baudrate))
                        tables = self.device.list_tables()
                        if tables == [
                                'Status', 'SamplesEvery2s',
                                'ValuesEveryMinute', 'Public'
                        ]:
                            connected = True
                            log.msg('looks OK...')
                            time.sleep(2)
                    except Exception:
                        log.msg('reconnect apparently failed!')
                    try:
                        past = t - timedelta(seconds=3)
                        vals = self.device.get_data('SamplesEvery2s',
                                                    past,
                                                    t,
                                                    debug=True)
                        log.msg(SENSOR_HEIGHT * 1000. -
                                vals[0]['DiffVolt'] * 250.)
                    except Exception:
                        log.msg('...apparently not after all!')
                        connected = False
                self.reconnect.clear()
                log.msg('mutex released...')
                return
            if debug:
                log.msg('getting data...')
            # snowheight (1000mV is 250cm) - values from CR1000 in mV, factor 1000 for packing
            snowheight = SENSOR_HEIGHT * 1000. - vals[0]['DiffVolt'] * 250.
            darray.append(int(round(snowheight)))
            # TODO: remove
            if debug:
                log.msg(darray)

            # preparations for file save
            # packcode: date as six shorts, microseconds as an unsigned long,
            # snowheight as a signed long
            # TODO: old packcode!
            packcode = "6hLl"
            #packcode = "<6hLl"
            # header
            sensorid = self.sensordict['sensorid']
            header = "# MagPyBin %s %s %s %s %s %s %d" % (
                sensorid, '[f]', '[JC]', '[cm]', '[1000]', packcode,
                struct.calcsize(packcode))
            data_bin = struct.pack(packcode, *darray)
            # date of dataloggers timestamp
            filedate = datetime.strftime(
                datetime(darray[0], darray[1], darray[2]), "%Y-%m-%d")
            if not self.confdict.get('bufferdirectory', '') == '':
                acs.dataToFile(self.confdict.get('bufferdirectory'), sensorid,
                               filedate, data_bin, header)
                if debug:
                    log.msg('data saved...')

            # sending via MQTT
            data = ','.join(list(map(str, darray)))
            head = header
            topic = self.confdict.get('station') + '/' + self.sensordict.get(
                'sensorid')
            coll = int(self.sensordict.get('stack'))
            senddata = False  # ensure the flag is always bound before the check below
            if coll > 1:
                self.metacnt = 1  # send meta data with every block
                if self.datacnt < coll:
                    self.datalst.append(data)
                    self.datacnt += 1
                else:
                    senddata = True
                    data = ';'.join(self.datalst)
                    self.datalst = []
                    self.datacnt = 0
            else:
                senddata = True

            if senddata:
                self.client.publish(topic + "/data", data)
                if self.count == 0:
                    self.client.publish(topic + "/meta", head)
                self.count += 1
                if self.count >= self.metacnt:
                    self.count = 0

            # right now auxiliary data only in the log file
            if t.second < 2:
                # every minute aux data (battery voltage and logger temperature) will be available
                # going 61s into the past to make sure there are already data
                past = t - timedelta(seconds=61)
                aux = self.device.get_data('ValuesEveryMinute', past, t)
                log.msg('----- aux every minute:')
                # timestamp directly from datetime into array
                try:
                    darray = datetime2array(aux[0]['Datetime'])
                except Exception:
                    # the following should never happen...
                    log.msg('AUXILIARY DATA NOT RETRIEVED PROPERLY! - aux:')
                    log.msg(aux)
                    log.msg('trying again...')
                    past = t - timedelta(seconds=62)
                    aux = self.device.get_data('ValuesEveryMinute', past, t)
                    try:
                        darray = datetime2array(aux[0]['Datetime'])
                    except Exception:
                        log.msg('giving up...')
                        return

                # battery voltage - factor 1000 for packing
                BattV_Min = int(round(aux[0]['BattV_Min'] * 1000))
                PTemp_C_Avg = int(round(aux[0]['PTemp_C_Avg'] * 1000))
                darray.extend([BattV_Min, PTemp_C_Avg])
                # alternative reading:
                #aux = (aux.filter(('Datetime', 'BattV_Min','PTemp_C_Avg')).to_csv(header=False))
                log.msg(darray)
                packcode = "<6hLLl"
Example #12
# Check the system platform; on Windows we need to open files in binary mode
# (kept in its own name so it does not shadow the 'platform' module)
system_name = platform.system()

# Holds the device's mapped location
if system_name == 'Linux':
    location = "dev/ttyO4"
elif system_name == 'Windows':
    location = "COM1"
else:
    location = "COM1"

# Holds the baud rate at which we're communicating with the device
port = "115200"

# The device we're connecting to.
device = CR1000.from_url('serial:/' + location + ":" + port)

# Get all tables from device
tables = device.list_tables()

# Start date for data collection; should be fifteen minutes in the past
start_date_form = datetime.now() - timedelta(minutes=15)

# End date for data collection; should be now, to complete our 15-minute interval
end_date_form = datetime.now()
"""
" Function which takes in a table name, gathers its data and exports it as a CSV file for analysis.
" @:param table_name - name of table to collect data and export
"""

Example #13
#logger_address='serial:/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller_D-if00-port0:38400'
#device = CR1000.from_url('serial:/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller_D-if00-port0:38400')
#time.sleep(5)
#device = CR1000.from_url(logger_address)

file_name = 'campbell_output.csv'

# line-buffered append; fully unbuffered (0) is only allowed for binary files in Python 3
fid = open(file_name, 'a', 1)

while True:
    # time_previous and time_current are assumed to be maintained elsewhere in this script
    log_attempt = 1
    while log_attempt < 10:
        try:
            device = CR1000.from_url(
                'serial:/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller_D-if00-port0:38400'
            )
            data = device.get_data('WPS_Tab_noarray', time_previous,
                                   time_current)
            break
        except Exception as e:
            # https://stackoverflow.com/questions/1483429/how-to-print-an-error-in-python
            print(time_current.strftime("%d/%b/%Y %H:%M:%S") +
                  " extracting failed " + str(log_attempt) + ' ' + str(e))
            log_attempt += 1
            time.sleep(10)
            #device = CR1000.from_url('serial:/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller_D-if00-port0:38400')
            continue
    data[0]['Datetime'] = data[0]['Datetime'].strftime("%d/%b/%Y %H:%M:%S")
    #data['Datetime'] =data['Datetime'].strftime("%d/%b/%Y %H:%M:%S")
Example #14
def log(url, dir_path, set_time=False):
    """
    Extract the data from the Campbell data logger for each specified table and save to a daily csv file.
    This will backfill the file with all data from the start of the day, or update from the latest entry if the file already contains data.
    Default tables are: Housekeeping, GPS_datetime, SoilTemperature, SoilMoisture, SoilHeatFlux and Radiation
    If set_time=True, the logger time will be updated when the script runs at midnight. Default is False.
    
    :param url: (str) URL for connection with logger in format 'tcp:iphost:port' or 'serial:/dev/ttyUSB0:19200:8N1'
    :param dir_path: (str) The path to the top level directory in which to create the csv files and folders.
    :param set_time: (boolean) If True, the logger time will be updated when the script runs at midnight. Default is False.
    :returns: None
    """
    device = CR1000.from_url(url)

    # first check if it's midnight (utc) & sync time, if set_time=True
    if set_time:
        try:
            c = ntplib.NTPClient()
            start_time = datetime.utcfromtimestamp(
                c.request('pool.ntp.org').tx_time)

            if start_time.hour == 0 and start_time.minute == 0:
                device.settime(
                    datetime.utcfromtimestamp(
                        c.request('pool.ntp.org').tx_time))

        except Exception:
            print("Could not sync with time server.")

    # device.list_tables():
    # ['Status', 'Housekeeping', 'GPS_datetime', 'SoilTemperature', 'SoilMoisture', 'SoilHeatFlux', 'Radiation', 'DataTableInfo', 'Public']
    tables = CONFIG['common']['logger_tables']
    date = datetime.utcnow().strftime("%Y-%m-%d")

    for table in tables:

        csv_dirs = os.path.join(dir_path, table)
        csv_name = f"{table}_{date}.csv"
        csv_path = os.path.join(csv_dirs, csv_name)

        # create csv path
        if not os.path.exists(csv_dirs):
            os.makedirs(csv_dirs)

        # if file doesn't exist - make it
        if not os.path.isfile(csv_path):
            open(csv_path, 'w').close()

        # open the csv file
        try:
            df = pd.read_csv(csv_path)
        except pd.errors.EmptyDataError:
            get_todays_data(url, table, csv_path)
            continue

        if df.empty:
            get_todays_data(url, table, csv_path)

        else:
            cmd = f"pycr1000 update {url} {table} {csv_path}"
            subprocess.call(cmd, shell=True)
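A minimal usage sketch; the URL and directory are placeholders, and in practice the call would run on a schedule (e.g. from cron):

log('tcp:192.168.0.30:6785',  # placeholder logger URL
    '/data/campbell',         # placeholder top-level directory
    set_time=True)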