Exemplo n.º 1
0
    def __init__(self, rld_dir='', out_dir='', encryption_pass='', hex_key='', filename='',
                 sympro_path=r'"C:/Program Files (x86)/Renewable NRG Systems/SymPRO Desktop/SymPRODesktop.exe"',
                 process_type='convert', convert_type='meas', nec='', site_filter='', site_file='', **kwargs):
        """Store SymPRO Desktop conversion settings; convert one file now if given.

        Paths are normalized to Windows form. When running on Windows and a
        ``filename`` is supplied, the output directory is validated and that
        single file is converted immediately; on any other platform a notice
        is printed directing the caller to the API-based converter.
        """
        # Directory paths are always stored in Windows form.
        self.rld_dir = windows_folder_path(rld_dir)
        self.out_dir = windows_folder_path(out_dir)

        self.encryption_pass = encryption_pass
        self.hex_key = hex_key
        self.sympro_path = sympro_path
        self.process_type = process_type
        self.convert_type = convert_type
        self.nec = nec
        self.site_filter = site_filter
        self.site_file = site_file

        # Legacy alias: honor a 'file_filter' kwarg only when no site_filter
        # was passed explicitly.
        if site_filter == '' and 'file_filter' in kwargs:
            self.file_filter = kwargs['file_filter']
            self.site_filter = self.file_filter

        if check_platform() != 'win32':
            print("""
            convert_rld.local() method ONLY compatible with Windows OS.
            Please use convert_rld.nrg_convert_api() method instead.
            """)
        elif filename:
            # single-file mode: make sure the destination exists, then convert
            affirm_directory(self.out_dir)
            self.single_file(filepath=filename)
Exemplo n.º 2
0
    def __init__(self,
                 rwd_dir='',
                 out_dir='',
                 filename='',
                 encryption_pin='',
                 sdr_path=r'C:/NRG/SymDR/SDR.exe',
                 convert_type='meas',
                 file_filter='',
                 wine_folder='~/.wine/drive_c/',
                 use_site_file=False,
                 raw_mode=False,
                 progress_bar=True,
                 show_result=True,
                 **kwargs):
        """Set up an SDR.exe conversion session.

        Selects the SDR command-line switch from the arguments, derives the
        SDR data folders from ``sdr_path``, normalizes directories for the
        current platform, and — when a single ``filename`` is given —
        converts it immediately.

        Parameters
        ----------
        rwd_dir : str
            directory holding rwd files (must be in Windows format, even
            under Wine)
        out_dir : str
            directory for converted output
        filename : str
            convert a single file instead of a directory
        encryption_pin : str
            logger PIN; selects the '/z' switch when non-empty
        sdr_path : str
            path to the SDR.exe executable
        convert_type : str
            type of conversion (default 'meas')
        file_filter : str
            text filter for rwd files
        wine_folder : str
            Wine drive_c location, used on non-Windows platforms
        use_site_file : bool
            use site-file parameters ('/s' switch)
        raw_mode : bool
            use raw mode ('/r' switch)
        progress_bar : bool
            show a progress bar during conversion
        show_result : bool
            show conversion results
        """

        # Pick the SDR command switch. Later flags override earlier ones:
        # raw_mode beats use_site_file, which beats the pin/queue choice.
        if encryption_pin != '':
            self.command_switch = '/z'  # noqueue with pin
        else:
            self.command_switch = '/q'  # noqueue (logger params)
        if use_site_file:
            self.command_switch = '/s'  # silent (site file params)
        if raw_mode:
            self.command_switch = '/r'  # raw mode (overrides the switches above)

        self.filename = filename
        self.progress_bar = progress_bar
        self.encryption_pin = encryption_pin
        # [:-1] strips the trailing separator presumably appended by
        # windows_folder_path — TODO confirm against that helper.
        self.sdr_path = windows_folder_path(sdr_path)[:-1]
        # SDR root is two path components above the executable
        # (e.g. ...\NRG\SymDR\SDR.exe -> ...\NRG)
        self.root_folder = "\\".join(self.sdr_path.split('\\')[:-2])
        self.RawData = self.root_folder + '\\RawData\\'
        self.ScaledData = self.root_folder + '\\ScaledData\\'
        self.file_filter = file_filter

        # Legacy alias: accept a 'site_filter' kwarg when file_filter not given.
        if 'site_filter' in kwargs and file_filter == '':
            self.file_filter = kwargs.get('site_filter')

        self.rwd_dir = windows_folder_path(
            rwd_dir)  # rwd_dir must be in Windows format, even if using Wine
        self.show_result = show_result
        self.platform = check_platform()
        self.wine_folder = wine_folder
        self.check_sdr()

        # Platform-specific output path style and path joiner.
        if self.platform == 'win32':
            self.out_dir = windows_folder_path(out_dir)
            self.file_path_joiner = '\\'
        else:
            self.out_dir = linux_folder_path(out_dir)
            self.file_path_joiner = '/'

        # Single-file mode: narrow the directory/filter to that one file
        # and run the conversion immediately.
        if self.filename:
            self.counter = 1
            self.rwd_dir = os.path.dirname(self.filename)
            self.file_filter = os.path.basename(self.filename)
            self.convert()
Exemplo n.º 3
0
    def concat_txt(self,
                   txt_dir='',
                   file_type='meas',
                   file_filter='',
                   filter2='',
                   start_date='1970-01-01',
                   end_date='2150-12-31',
                   ch_details=False,
                   output_txt=False,
                   out_file='',
                   progress_bar=True,
                   **kwargs):
        """Will concatenate all text files in the txt_dir

        files must match the file_filter argument. Note these are both blank by default.

        Parameters
        ----------
        txt_dir : str
            directory holding txt files
        file_type : str
            type of export (meas, event, comm, sample, etc...)
        file_filter : str
            text filter for txt files, like site number, etc.
        filter2 : str
            secondary text filter
        start_date : str
            for filtering files to concat based on date "YYYY-mm-dd"
        end_date : str
            for filtering files to concat based on date "YYYY-mm-dd"
        ch_details : bool
            show additional info in ch_info dataframe
        output_txt : bool
            create a txt output of data df
        out_file : str
            filename to write data dataframe too if output_txt = True
        progress_bar : bool
            show bar on concat [True] or list of files [False]

        Returns
        ---------
        ch_info : obj
            pandas dataframe of ch_list (below) pulled out of file with sympro_txt_read.arrange_ch_info()
        ch_list : list
            list of channel info; can be converted to json w/ import json ... json.dumps(fut.ch_info)
        data : obj
            pandas dataframe of all data
        head : obj
            lines at the top of the txt file..., used when rebuilding timeshifted files
        site_info : obj
            pandas dataframe of site information
        logger_sn : str
        ipack_sn : str
        logger_type : str
        ipack_type : str
        latitude : float
        longitude : float
        elevation : int
        site_number : str
        site_description : str
        start_date : str
        txt_file_names : list
            list of files included in concatenation

        Examples
        --------
        Read files into nrgpy reader object

        >>> import nrgpy
        >>> reader = nrgpy.sympro_txt_read()
        >>> reader.concat_txt(
                txt_dir='/path/to/txt/files/',
                file_filter='123456', # site 123456
                start_date='2020-01-01',
                end_date='2020-01-31',
            )
        Time elapsed: 2 s | 33 / 33 [=============================================] 100%
        Queue processed
        >>> reader.logger_sn
        '820600019'
        >>> reader.ch_info
         	Bearing: 	Channel: 	Description: 	Effective Date: 	Height: 	Offset: 	Scale Factor: 	Serial Number: 	Type: 	Units:
        0 	50.00 	    1 	        NRG S1 	        2020-01-31 00:00:00 	33.00 	0.13900 	0.09350 	    94120000059 	Anemometer 	m/s
        1 	230.00 	    2 	        NRG S1 	        2020-01-31 00:00:00 	0.00 	0.13900 	0.09350 	    94120000058 	Anemometer 	m/s
        2 	50.00 	    3 	        NRG S1 	        2020-01-31 00:00:00 	22.00 	0.13900 	0.09350 	    94120000057 	Anemometer 	m/s
        3 	230.00 	    4 	        NRG 40C Anem 	2020-01-31 00:00:00 	22.00 	0.35000 	0.76500 	    179500324860 	Anemometer 	m/s
        4 	50.00 	    5 	        NRG 40C Anem 	2020-01-31 00:00:00 	12.00 	0.35000 	0.76500 	    179500324859 	Anemometer 	m/s
        5 	230.00 	    6 	        NRG S1 	        2020-01-31 00:00:00 	12.00 	0.13900 	0.09350 	    94120000056 	Anemometer 	m/s
        6 	320.00 	    13 	        NRG 200M Vane 	2020-01-31 00:00:00 	32.00 	-1.46020 	147.91100 	    10700000125 	Vane 	        Deg
        7 	320.00 	    14 	        NRG 200M Vane 	2020-01-31 00:00:00 	21.00 	-1.46020 	147.91100 	    10700000124 	Vane 	        Deg
        8 	0.00 	    15 	        NRG T60 Temp 	2020-01-31 00:00:00 	34.00 	-40.85550 	44.74360 	    9400000705          Analog          C
        9 	0.00 	    16 	        NRG T60 Temp 	2020-01-31 00:00:00 	2.00 	-40.85550 	44.74360 	    9400000xxx          Analog          C
        10 	0.00 	    17 	        NRG RH5X Humi 	2020-01-31 00:00:00 	0.00 	0.00000 	20.00000 	    NaN 	        Analog          %RH
        11 	0.00 	    20 	        NRG BP60 Baro 	2020-01-31 00:00:00 	0.00 	495.27700 	243.91400 	    NaN 	        Analog          hPa
        12 	0.00 	    21 	        NRG BP60 Baro 	2020-01-31 00:00:00 	2.00 	495.04400 	244.23900 	    9396FT1937          Analog  	hPa
        """

        # Legacy alias: accept a 'site_filter' kwarg when file_filter not given.
        if 'site_filter' in kwargs and file_filter == '':
            self.file_filter = kwargs.get('site_filter')
        else:
            self.file_filter = file_filter

        self.ch_details = ch_details
        self.start_date = start_date
        self.end_date = end_date
        self.filter2 = filter2
        self.file_type = file_type
        self.txt_file_names = []

        if check_platform() == 'win32':
            self.txt_dir = windows_folder_path(txt_dir)
        else:
            self.txt_dir = linux_folder_path(txt_dir)

        first_file = True

        # filter by both text filters and the date range, once, up front
        files = [
            f for f in sorted(glob(self.txt_dir + '*.txt'))
            if self.file_filter in f and self.filter2 in f
            and date_check(self.start_date, self.end_date, f)
        ]

        self.file_count = len(files)
        self.pad = len(str(self.file_count))
        self.counter = 1
        self.start_time = datetime.now()

        for f in files:

            if progress_bar:
                draw_progress_bar(self.counter, self.file_count,
                                  self.start_time)
            else:
                print("Adding {0}/{1} ... {2} ... ".format(
                    str(self.counter).rjust(self.pad),
                    str(self.file_count).ljust(self.pad), os.path.basename(f)),
                      end="",
                      flush=True)

            if first_file:
                first_file = False

                try:
                    base = sympro_txt_read(
                        f, text_timestamps=self.text_timestamps)
                    # bind s so a single matching file still populates the
                    # attributes pulled from s after the loop (was a bug:
                    # one-file runs hit UnboundLocalError and lost the data)
                    s = base
                    if not progress_bar:
                        print("[OK]")
                    self.txt_file_names.append(os.path.basename(f))
                except IndexError:
                    print('Only standard SymPRO headertypes accepted')
                    break
                except Exception:  # narrowed from bare except; still best-effort per file
                    if not progress_bar:
                        print("[FAILED]")
                    print("could not concat {0}".format(os.path.basename(f)))
            else:
                try:
                    s = sympro_txt_read(
                        f,
                        ch_details=self.ch_details,
                        text_timestamps=self.text_timestamps,
                        site_details=False,
                    )
                    base.data = base.data.append(s.data)
                    base.ch_info = base.ch_info.append(s.ch_info)
                    if not progress_bar:
                        print("[OK]")
                    self.txt_file_names.append(os.path.basename(f))

                except Exception:  # narrowed from bare except; skip unreadable files
                    if not progress_bar:
                        print("[FAILED]")
                    print("could not concat {0}".format(os.path.basename(f)))

            self.counter += 1

        if out_file != "":
            self.out_file = out_file

        if output_txt:
            base.data.to_csv(os.path.join(txt_dir, out_file),
                             sep=',',
                             index=False)

        try:
            self.ch_info = s.ch_info
            self.ch_list = s.ch_list
            self.array = s.array
            # overlapping exports produce duplicate rows; keep the first
            self.data = base.data.drop_duplicates(subset=['Timestamp'],
                                                  keep='first')
            self.data.reset_index(drop=True, inplace=True)
            base.ch_info['ch'] = base.ch_info['Channel:'].astype(int)
            # sort channels numerically, then drop the helper column
            # (keyword form replaces deprecated positional axis in .drop)
            self.ch_info = base.ch_info.sort_values(by=['ch'])\
                .drop_duplicates(
                    subset=['Serial Number:', 'Channel:', 'Type:', 'Description:', 'Height:', 'Bearing:',
                            'Scale Factor:', 'Offset:', 'Units:'], ignore_index=True).drop(columns=['ch'])

            self.head = s.head
            self.site_info = s.site_info
            self.format_site_data()
            print("\n")

        except UnboundLocalError:
            # no file was successfully read, so base/s were never bound
            print("No files match to concatenate.")
            return None
Exemplo n.º 4
0
    def concat_txt(self, txt_dir='', output_txt=False, out_file='',
                   file_filter='', file_filter2='',
                   start_date='1970-01-01', end_date='2150-12-31',
                   progress_bar=True):
        """concatenate files in a folder

        parameters
        ----------
        txt_dir : str
            path to csv or csv.zip files
        output_txt : boolean
            export concatenated data
        out_file : str
            optional, filename of text export
        file_filter : str
            text that matching filenames must contain
        file_filter2 : str
            secondary text filter for filenames
        start_date : str
            yyyy-mm-dd formatted string
        end_date : str
            yyyy-mm-dd formatted string
        progress_bar : boolean
            show progress bar instead of each file being concatenated

        returns
        -------
        None
            adds data dataframe to reader object
        """
        self.txt_dir = txt_dir
        self.output_txt = output_txt
        self.out_file = out_file
        self.file_filter = file_filter
        self.file_filter2 = file_filter2
        self.start_date = start_date
        self.end_date = end_date

        if check_platform() == 'win32':
            self.txt_dir = windows_folder_path(txt_dir)
        else:
            self.txt_dir = linux_folder_path(txt_dir)

        first_file = True

        # filter once up front; no need to re-check the filters in the loop
        files = [
            f for f in sorted(glob(self.txt_dir + "*"))
            if self.file_filter in f and self.file_filter2 in f
            and date_check(self.start_date, self.end_date, f)
        ]

        self.file_count = len(files)
        self.pad = len(str(self.file_count))
        self.counter = 1
        self.start_time = datetime.now()

        for f in files:
            if progress_bar:
                draw_progress_bar(self.counter, self.file_count, self.start_time)
            else:
                print("Adding {0}/{1}  ...  {2} ".format(
                    str(self.counter).rjust(self.pad),
                    str(self.file_count).ljust(self.pad), f),
                      end="", flush=True)

            if first_file:
                first_file = False
                try:
                    base = spidar_data_read(f)
                    if not progress_bar:
                        print("[OK]")
                except IndexError:
                    print('Only standard Spidar headertypes accepted')
                    break
            else:
                try:
                    s = spidar_data_read(f)
                    base.data = base.data.append(s.data, sort=False)
                    if not progress_bar:
                        print("[OK]")
                except Exception as e:
                    if not progress_bar:
                        print("[FAILED]")
                    print("could not concat {0}".format(f))
                    print(e)

            self.counter += 1

        if out_file != "":
            self.out_file = out_file
        if output_txt:
            # join safely so a missing trailing separator in txt_dir
            # does not glue the directory and filename together
            base.data.to_csv(os.path.join(txt_dir, out_file), sep=',', index=False)

        try:
            self.base = base
            self.heights = base.heights
            self.serial_number = base.serial_number
            # overlapping files produce duplicate timestamps; keep the first
            self.data = base.data.drop_duplicates(subset=['Timestamp'], keep='first')
            self.data.reset_index(drop=True, inplace=True)

        except Exception as e:
            # base is unbound when no file was successfully read
            print("No files match to concatenate.")
            print(e)
            return None
Exemplo n.º 5
0
    def concat(self,
               output_txt=False,
               out_file='',
               file_filter='',
               filter2='',
               progress_bar=True):
        """combine exported rwd files (in txt format)

        parameters
        ----------
        output_txt : bool
            set to True to save a concatenated text file
        out_file : str
            filepath, absolute or relative; autogenerated from data_type and
            today's date when blank
        file_filter : str
            text that matching filenames must contain
        filter2 : str
            secondary filename filter; only applied when self.filter2 is unset
        progress_bar : bool
            show progress bar instead of per-file messages
        """
        self.file_filter = file_filter

        # NOTE(review): the filter2 argument is ignored whenever self.filter2
        # was already set (e.g. by __init__) -- confirm this is intentional.
        if self.filter2 == '':
            self.filter2 = filter2

        if check_platform() == 'win32':
            self.txt_dir = windows_folder_path(self.txt_dir)
        else:
            self.txt_dir = linux_folder_path(self.txt_dir)

        first_file = True
        files = sorted(glob(self.txt_dir + '*.txt'))

        self.file_count = len(files)
        self.pad = len(str(self.file_count)) + 1

        self.counter = 1
        self.start_time = datetime.now()

        for f in files:

            if self.file_filter in f and self.filter2 in f:

                if progress_bar:
                    draw_progress_bar(self.counter, self.file_count,
                                      self.start_time)
                else:
                    print("Adding  {0}/{1}  {2}  ...  ".format(
                        str(self.counter).rjust(self.pad),
                        str(self.file_count).ljust(self.pad), f),
                          end="",
                          flush=True)

                if first_file:
                    first_file = False

                    try:
                        base = read_text_data(filename=f,
                                              data_type=self.data_type,
                                              file_filter=self.file_filter,
                                              file_ext=self.file_ext,
                                              sep=self.sep)
                        # bind s so a single matching file still populates the
                        # attributes pulled from s after the loop (was a bug:
                        # one-file runs hit UnboundLocalError and lost the data)
                        s = base
                        if not progress_bar:
                            print("[OK]")

                    except IndexError:
                        print('Only standard headertypes accepted')
                        break

                else:
                    try:
                        s = read_text_data(filename=f,
                                           data_type=self.data_type,
                                           file_filter=self.file_filter,
                                           file_ext=self.file_ext,
                                           sep=self.sep)
                        base.data = base.data.append(s.data, sort=False)
                        if not progress_bar:
                            print("[OK]")
                    except Exception:  # narrowed from bare except; skip unreadable files
                        if not progress_bar:
                            print("[FAILED]")
                        print("could not concat {0}".format(f))

            # counter tracks every file seen, matching or not (keeps the
            # progress bar aligned with the full directory listing)
            self.counter += 1

        if output_txt:

            if out_file == "":
                # default name: <data_type>_YYYY-mm-dd.txt
                out_file = f"{self.data_type}_" + datetime.today().strftime(
                    "%Y-%m-%d") + ".txt"
            base.data.to_csv(out_file, sep=',', index=False)
            self.out_file = out_file

        try:
            self.ch_info = s.ch_info
            self.ch_list = s.ch_list
            self.data = base.data.drop_duplicates(
                subset=[self.header_sections['data_header']], keep='first')
            self.head = s.head
            self.site_info = s.site_info
            self.filename = s.filename
            # site number is the first four characters of the RWD file name
            self.site_number = self.filename.split("\\")[-1][:4]
            self.format_rwd_site_data()

        except UnboundLocalError:
            # no file was successfully read, so base/s were never bound
            print("No files match to concatenate.")
            return None