Example #1
    def test_ordered_dataset_ORM(self):

        channel_01 = Channel('channel_01', Coords('dummy', (0,0,0)))
        channel_02 = Channel('channel_02', Coords('dummy', (0,0,0)))
        channel_03 = Channel('channel_03', Coords('dummy', (0,0,0)))
        channel_04 = Channel('channel_04', Coords('dummy', (0,0,0)))
        

        fd1 = FloatDelta(channel_01, channel_02, 0.45)
        fd2 = FloatDelta(channel_02, channel_03, 0.25)
        fd3 = FloatDelta(channel_03, channel_04, 0.49)

        #ods = OrderedDataSet(ordered_by="channel_1.name")
        ods = BaseOrderedDataSet('test_ods')
        
        for fd in [fd3, fd1, fd2]:
            ods.append(fd)

        ods.save()

        # now read out of database
        if pyfusion.orm_manager.IS_ACTIVE:
            session = pyfusion.orm_manager.Session()
            db_ods = session.query(BaseOrderedDataSet).first()
            self.assertEqual(db_ods[0].channel_1.name, 'channel_03')
            self.assertEqual(db_ods[1].channel_1.name, 'channel_01')
            self.assertEqual(db_ods[2].channel_1.name, 'channel_02')
Example #2
    def test_channel_list(self):

        ch01 = Channel('test_1', Coords('dummy', (0,0,0)))
        ch02 = Channel('test_2', Coords('dummy', (0,0,0)))
        ch03 = Channel('test_3', Coords('dummy', (0,0,0)))
                       
        new_cl = ChannelList([ch01, ch02, ch03])
Example #3
 def test_ORM_floatdelta(self):
     """ check that floatdelta can be saved to database"""
     channel_01 = Channel('channel_01', Coords('dummy', (0,0,0)))
     channel_02 = Channel('channel_02', Coords('dummy', (0,0,0)))
     fd = FloatDelta(channel_01, channel_02, 0.45)
     fd.save()
     if pyfusion.orm_manager.IS_ACTIVE:
         session = pyfusion.orm_manager.Session()
         db_fd = session.query(FloatDelta).first()
         self.assertEqual(db_fd.delta, 0.45)
Example #4
 def test_coord_transform(self):
     cyl_coords_1 = (1.0,1.0,1.0)
     dummy_coords_1 = Coords('cylindrical',cyl_coords_1)
     dummy_coords_1.load_transform(DummyCoordTransform)
     # The DummyCoordinateTransform should map (a,b,c) -> (2a,3b,4c)
     self.assertEqual(dummy_coords_1.dummy(), (2*cyl_coords_1[0],
                                               3*cyl_coords_1[1],
                                               4*cyl_coords_1[2]))
     # Check again with different coordinates.
     cyl_coords_2 = (2.0,1.0,4.0)
     dummy_coords_2 = Coords('cylindrical',cyl_coords_2)
     dummy_coords_2.load_transform(DummyCoordTransform)
     self.assertEqual(dummy_coords_2.dummy(), (2*cyl_coords_2[0],
                                               3*cyl_coords_2[1],
                                               4*cyl_coords_2[2]))
Example #5
    def do_fetch(self):
        chan_name = (self.diag_name.split('-'))[-1]  # remove -
        filename_dict = {'diag_name': chan_name, 'shot': self.shot}

        self.basename = path.join(pf.config.get('global', 'localdatapath'),
                                  data_filename % filename_dict)

        files_exist = path.exists(self.basename)
        if not files_exist:
            raise Exception("file " + self.basename + " not found.")
        else:
            signal_dict = newload(self.basename)

        if ((chan_name == array(['MP5', 'HMP13', 'HMP05'])).any()): flip = -1.
        else: flip = 1.
        if self.diag_name[0] == '-': flip = -flip
        #        coords = get_coords_for_channel(**self.__dict__)
        ch = Channel(self.diag_name, Coords('dummy', (0, 0, 0)))
        output_data = TimeseriesData(
            timebase=Timebase(signal_dict['timebase']),
            signal=Signal(flip * signal_dict['signal']),
            channels=ch)
        output_data.meta.update({'shot': self.shot})

        return output_data
Example #6
 def test_channels_SQL(self):
     test_coords = Coords('cylindrical',(0.0,0.0,0.0))
     test_ch = Channel('test_1', test_coords)
     test_ch.save()
     if pyfusion.orm_manager.IS_ACTIVE:
         session = pyfusion.orm_manager.Session()
         our_channel = session.query(Channel).first()
         self.assertEqual(our_channel.name, 'test_1')
Example #7
 def test_timebase_and_coords(self):
     n_ch = 10
     n_samples = 1024
     timebase = Timebase(np.arange(n_samples)*1.e-6)
     channels = ChannelList(*(Channel('ch_%d' %i, Coords('cylindrical',(1.0,i,0.0))) for i in 2*np.pi*np.arange(n_ch)/n_ch))
     multichannel_data = get_multimode_test_data(channels = channels,
                                                 timebase = timebase,
                                                 noise = 0.5)
Example #8
    def test_channellist_ORM(self):

        ch01 = Channel('test_1', Coords('dummy', (0,0,0)))
        ch02 = Channel('test_2', Coords('dummy', (0,0,0)))
        ch03 = Channel('test_3', Coords('dummy', (0,0,0)))

        new_cl = ChannelList(ch03, ch01, ch02)

        new_cl.save()

        # get our channellist
        if pyfusion.orm_manager.IS_ACTIVE:
            session = pyfusion.orm_manager.Session()
            our_channellist = session.query(ChannelList).order_by("id").first()

            self.assertEqual(our_channellist[0].name, 'test_3')
            self.assertEqual(our_channellist[1].name, 'test_1')
            self.assertEqual(our_channellist[2].name, 'test_2')
Example #9
def fetch_data_from_file(fetcher):
    prm_dict = read_prm_file(fetcher.basename + ".prm")
    bytes = int(prm_dict['DataLength(byte)'][0])
    bits = int(prm_dict['Resolution(bit)'][0])
    if 'ImageType' not in prm_dict:  # if absent, assume unsigned
        bytes_per_sample = 2
        dat_arr = Array.array('H')
        offset = 2**(bits - 1)
        dtype = np.dtype('uint16')
    else:
        if prm_dict['ImageType'][0] == 'INT16':
            bytes_per_sample = 2
            if prm_dict['BinaryCoding'][0] == 'offset_binary':
                dat_arr = Array.array('H')
                offset = 2**(bits - 1)
                dtype = np.dtype('uint16')
            elif prm_dict['BinaryCoding'][0] == "shifted_2's_complementary":
                dat_arr = Array.array('h')
                offset = 0
                dtype = np.dtype('int16')
            else:
                raise NotImplementedError(' binary coding ' +
                                          prm_dict['BinaryCoding'])

    fp = open(fetcher.basename + '.dat', 'rb')
    dat_arr.fromfile(fp, bytes // bytes_per_sample)
    fp.close()

    clockHz = None

    if 'SamplingClock' in prm_dict:
        clockHz = double(prm_dict['SamplingClock'][0])
    if 'SamplingInterval' in prm_dict:
        clockHz = clockHz / double(prm_dict['SamplingInterval'][0])
    if 'ClockSpeed' in prm_dict:
        if clockHz is not None:
            pyfusion.utils.warn(
                'Apparent duplication of clock speed information')
        clockHz = double(prm_dict['ClockSpeed'][0])
        clockHz = LHD_A14_clk(fetcher.shot)  # see above
    if clockHz is not None:
        timebase = arange(len(dat_arr)) / clockHz
    else:
        raise NotImplementedError("timebase not recognised")

    ch = Channel("%s-%s" % (fetcher.diag_name, fetcher.channel_number),
                 Coords('dummy', (0, 0, 0)))
    if fetcher.gain is not None:
        gain = fetcher.gain
    else:
        gain = 1
    output_data = TimeseriesData(timebase=Timebase(timebase),
                                 signal=Signal(gain * dat_arr),
                                 channels=ch)
    output_data.meta.update({'shot': fetcher.shot})

    return output_data
Example #10
 def test_dataset_filter_nocopy(self):
     n_ch = 10
     n_samples = 640
     timebase = Timebase(np.arange(n_samples)*1.e-6)
     channels = ChannelList(*(Channel('ch_%d' %i, Coords('cylindrical',(1.0,i,0.0))) for i in 2*np.pi*np.arange(n_ch)/n_ch))
     multichannel_data = get_multimode_test_data(channels = channels,
                                                 timebase = timebase,
                                                 noise = 0.5)
     dataset = multichannel_data.segment(64, copy=False)
     new_dataset = dataset.segment(16, copy=False)
Example #11
    def do_fetch(self):
        # TODO support non-signal datatypes
        if self.fetch_mode == 'thin client':
            ch = Channel(self.mds_path_components['nodepath'],
                         Coords('dummy', (0, 0, 0)))
            data = self.acq.connection.get(
                self.mds_path_components['nodepath'])
            dim = self.acq.connection.get('dim_of(%s)' %
                                          self.mds_path_components['nodepath'])
            # TODO: fix this hack (same hack as when getting signal from node)
            if len(data.shape) > 1:
                data = np.array(data)[0, ]
            if len(dim.shape) > 1:
                dim = np.array(dim)[0, ]
            output_data = TimeseriesData(timebase=Timebase(dim),
                                         signal=Signal(data),
                                         channels=ch)
            output_data.meta.update({'shot': self.shot})
            return output_data

        elif self.fetch_mode == 'http':
            data_url = self.acq.server + '/'.join([
                self.mds_path_components['tree'],
                str(self.shot), self.mds_path_components['tagname'],
                self.mds_path_components['nodepath']
            ])

            data = mdsweb.data_from_url(data_url)
            ch = Channel(self.mds_path_components['nodepath'],
                         Coords('dummy', (0, 0, 0)))
            t = Timebase(data.data.dim)
            s = Signal(data.data.signal)
            output_data = TimeseriesData(timebase=t, signal=s, channels=ch)
            output_data.meta.update({'shot': self.shot})
            return output_data

        else:
            node = self.tree.getNode(self.mds_path)
            if int(node.dtype) == 195:
                return get_tsd_from_node(self, node)
            else:
                raise Exception('Unsupported MDSplus node type')
Example #12
 def test_timeseries_filter_nocopy(self):
     # Use reduce_time filter for testing...
     n_ch = 10
     n_samples = 5000
     timebase = Timebase(np.arange(n_samples)*1.e-6)
     channels = ChannelList(*(Channel('ch_%d' %i, Coords('cylindrical',(1.0,i,0.0))) for i in 2*np.pi*np.arange(n_ch)/n_ch))
     multichannel_data = get_multimode_test_data(channels = channels,
                                                 timebase = timebase,
                                                 noise = 0.5)
     new_data = multichannel_data.reduce_time([0,1.e-3], copy=False)
     self.assertTrue(new_data is multichannel_data)
Example #13
    def test_remove_mean_single_channel(self):
        tb = generate_timebase(t0=-0.5, n_samples=1.e2, sample_freq=1.e2)
        # nonzero signal mean
        tsd = TimeseriesData(timebase=tb,
                             signal=Signal(np.arange(len(tb))),
                             channels=ChannelList(
                                 Channel('ch_01', Coords('dummy', (0, 0, 0)))))

        filtered_tsd = tsd.subtract_mean()

        assert_almost_equal(np.mean(filtered_tsd.signal), 0)
Example #14
 def fetch(self):
     tb = generate_timebase(t0=float(self.t0),
                            n_samples=int(self.n_samples),
                            sample_freq=float(self.sample_freq))
     sig = Signal(
         float(self.amplitude) * sin(2 * pi * float(self.frequency) * tb))
     dummy_channel = Channel('ch_01', Coords('dummy', (0, 0, 0)))
     output_data = TimeseriesData(timebase=tb,
                                  signal=sig,
                                  channels=ChannelList(dummy_channel))
     output_data.meta.update({'shot': self.shot})
     return output_data
Example #15
    def test_remove_noncontiguous(self):
        tb1 = generate_timebase(t0=-0.5, n_samples=1.e2, sample_freq=1.e2)
        tb2 = generate_timebase(t0=-0.5, n_samples=1.e2, sample_freq=1.e2)
        tb3 = generate_timebase(t0=-0.5, n_samples=1.e2, sample_freq=1.e2)
        # nonzero signal mean
        tsd1 = TimeseriesData(timebase=tb1,
                              signal=Signal(np.arange(len(tb1))),
                              channels=ChannelList(
                                  Channel('ch_01', Coords('dummy',
                                                          (0, 0, 0)))))
        tsd2 = TimeseriesData(timebase=tb2,
                              signal=Signal(np.arange(len(tb2))),
                              channels=ChannelList(
                                  Channel('ch_01', Coords('dummy',
                                                          (0, 0, 0)))))
        tsd3 = TimeseriesData(timebase=tb3,
                              signal=Signal(np.arange(len(tb3))),
                              channels=ChannelList(
                                  Channel('ch_01', Coords('dummy',
                                                          (0, 0, 0)))))

        self.assertTrue(tb1.is_contiguous())
        self.assertTrue(tb2.is_contiguous())
        self.assertTrue(tb3.is_contiguous())
        tsd2.timebase[-50:] += 1.0
        self.assertFalse(tb2.is_contiguous())

        ds = DataSet('ds')
        for tsd in [tsd1, tsd2, tsd3]:
            ds.add(tsd)

        for tsd in [tsd1, tsd2, tsd3]:
            self.assertTrue(tsd in ds)

        filtered_ds = ds.remove_noncontiguous()
        for tsd in [tsd1, tsd3]:
            self.assertTrue(tsd in filtered_ds)

        self.assertFalse(tsd2 in filtered_ds)
Example #16
def get_probe_angles(input_data, closed=False):
    """  
    return a list of thetas for a given signal (timeseries) or a string that specifies it.
              get_probe_angles('W7X:W7X_MIRNOV_41_BEST_LOOP:(20180912,43)')

    This is a kludgey way to read coordinates.  Should be through acquisition.base or
    acquisition.'device' rather than looking up config directly
    """
    import pyfusion
    if isinstance(input_data, str):
        pieces = input_data.split(':')
        if len(pieces) == 3:
            dev_name, diag_name, shotstr = pieces
            shot_number = eval(shotstr)
            dev = pyfusion.getDevice(dev_name)
            data = dev.acq.getdata(shot_number, diag_name, time_range=[0, 0.1])
        else:
            from pyfusion.data.timeseries import TimeseriesData, Timebase, Signal
            from pyfusion.data.base import Channel, ChannelList, Coords
            input_data = TimeseriesData(Timebase([0, 1]), Signal([0, 1]))
            dev_name, diag_name = pieces
            # channels are amongst options
            opts = pyfusion.config.pf_options('Diagnostic', diag_name)
            chans = [
                pyfusion.config.pf_get('Diagnostic', diag_name, opt)
                for opt in opts if 'channel_' in opt
            ]
            # for now, assume config_name is the same as name
            input_data.channels = ChannelList(
                *[Channel(ch, Coords('?', [0, 0, 0])) for ch in chans])

    Phi = np.array([
        2 * np.pi / 360 * float(
            pyfusion.config.get(
                'Diagnostic:{cn}'.format(
                    cn=c.config_name if c.config_name != '' else c.name),
                'Coords_reduced').split(',')[0]) for c in input_data.channels
    ])

    Theta = np.array([
        2 * np.pi / 360 * float(
            pyfusion.config.get(
                'Diagnostic:{cn}'.format(
                    cn=c.config_name if c.config_name != '' else c.name),
                'Coords_reduced').split(',')[1]) for c in input_data.channels
    ])

    if closed:
        Phi = np.append(Phi, Phi[0])
        Theta = np.append(Theta, Theta[0])
    return (dict(Theta=Theta, Phi=Phi))
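A usage sketch follows; the device:diagnostic:shot string is the docstring's own example, and whether it resolves depends on the local pyfusion configuration.

# Usage sketch only; the argument string is copied from the docstring above.
angles = get_probe_angles('W7X:W7X_MIRNOV_41_BEST_LOOP:(20180912,43)')
print(angles['Phi'])    # first Coords_reduced component per channel, in radians
print(angles['Theta'])  # second Coords_reduced component per channel, in radians
# With closed=True the first angle is appended to the end of each array,
# which is convenient for plotting a closed probe array.
angles_closed = get_probe_angles('W7X:W7X_MIRNOV_41_BEST_LOOP:(20180912,43)',
                                 closed=True)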
Example #17
    def do_fetch(self):
        channel_length = int(self.length)
        outdata = np.zeros(1024 * 2 * 256 + 1)
        ##  !! really should put a wrapper around gethjdata to do common stuff
        #  outfile is only needed if the direct passing of binary won't work
        #  with tempfile.NamedTemporaryFile(prefix="pyfusion_") as outfile:
        # get in two steps to make debugging easier
        allrets = gethjdata.gethjdata(self.shot,
                                      channel_length,
                                      self.path,
                                      verbose=VERBOSE,
                                      opt=1,
                                      ierror=2,
                                      isample=-1,
                                      outdata=outdata,
                                      outname='')
        ierror, isample, getrets = allrets
        if ierror != 0:
            raise LookupError(
                'hj Okada style data not found for {s}:{c}'.format(
                    s=self.shot, c=self.path))

        ch = Channel(self.path, Coords('dummy', (0, 0, 0)))

        # the intent statement causes the out var to be returned in the result list
        # looks like the time,data is interleaved in a 1x256 array
        # it is fed in as real*64, but returns as real*32! (as per fortran decl)
        debug_(pyfusion.DEBUG,
               4,
               key='Heliotron_fetch',
               msg='after call to getdata')
        # timebase in secs (ms in raw data) - could add a preferred unit?
        # this is partly allowed for in savez_compressed, newload, and
        # for plotting, in the config file.
        # important that the 1e-3 be inside the Timebase()
        output_data = TimeseriesData(timebase=Timebase(
            1e-3 * getrets[1::2][0:isample]),
                                     signal=Signal(getrets[2::2][0:isample]),
                                     channels=ch)
        output_data.meta.update({'shot': self.shot})
        if pyfusion.VERBOSE > 0: print('HJ config name', self.config_name)
        output_data.config_name = self.config_name
        stprms = get_static_params(shot=self.shot, signal=self.path)
        if len(list(stprms)) == 0:  # maybe this should be ignored - how do we use it?
            raise LookupError(
                ' failure to get params for {shot}:{path}'.format(
                    shot=self.shot, path=self.path))
        output_data.params = stprms
        return output_data
Example #18
     def do_fetch(self):
         channel_length = int(self.length)
         outdata=np.zeros(1024*2*256+1)
         with tempfile.NamedTemporaryFile(prefix="pyfusion_") as outfile:
             getrets=gethjdata.gethjdata(self.shot,channel_length,self.path,
                                         VERBOSE, OPT,
                                         outfile.name, outdata)
         ch = Channel(self.path,
                      Coords('dummy', (0,0,0)))

         output_data = TimeseriesData(timebase=Timebase(getrets[1::2]),
                                 signal=Signal(getrets[2::2]), channels=ch)
         output_data.meta.update({'shot':self.shot})
         
         return output_data
Example #19
def get_tsd_from_node(fetcher, node):
    """Return pyfusion TimeSeriesData corresponding to an MDSplus signal node."""
    # TODO: load actual coordinates
    ch = Channel(fetcher.mds_path_components['nodepath'],
                 Coords('dummy', (0, 0, 0)))
    signal = Signal(node.data())
    dim = node.dim_of().data()
    # TODO: stupid hack,  the test signal has dim  of [[...]], real data
    # has [...].  Figure out  why. (...probably because  original signal
    # uses a build_signal function)
    if len(dim) == 1:
        dim = dim[0]
    timebase = Timebase(dim)
    output_data = TimeseriesData(timebase=timebase, signal=signal, channels=ch)
    output_data.meta.update({'shot': fetcher.shot})
    return output_data
Example #20
    def test_svd_data(self):
        n_ch = 10
        n_samples = 1024
        timebase = Timebase(np.arange(n_samples)*1.e-6)
        channels = ChannelList(*(Channel('ch_%02d' %i, Coords('cylindrical',(1.0,i,0.0))) for i in 2*np.pi*np.arange(n_ch)/n_ch))
        multichannel_data = get_multimode_test_data(channels = channels,
                                                    timebase = timebase,
                                                    noise = 0.5)

        test_svd = multichannel_data.svd()
        self.assertTrue(isinstance(test_svd, SVDData))
        self.assertEqual(len(test_svd.topos[0]), n_ch)
        self.assertEqual(len(test_svd.chronos[0]), n_samples)
        assert_array_almost_equal(test_svd.chrono_labels, timebase)
        for c_i, ch in enumerate(channels):
            self.assertEqual(ch, test_svd.channels[c_i])
Example #21
    def test_flucstruc_phases(self):
        
        n_ch = 10
        n_samples = 5000
        timebase = Timebase(np.arange(n_samples)*1.e-6)
        channels = ChannelList(*(Channel('ch_%d' %i, Coords('cylindrical',(1.0,i,0.0))) for i in 2*np.pi*np.arange(n_ch)/n_ch))
        multichannel_data = get_multimode_test_data(channels = channels,
                                                    timebase = timebase,
                                                    noise = 0.5)

        data_reduced_time=multichannel_data.reduce_time([0,0.002]).subtract_mean().normalise(method='v',separate=True)

        fs_set=data_reduced_time.flucstruc()
        phases = []
        for fs in fs_set:
            for j in range(0,len(fs.dphase)):
                phases.append(fs.dphase[j].delta)
Example #22
    def do_fetch(self):
        print(self.shot, self.senal)
        data_dim = tjiidata.dimens(self.shot, self.senal)
        if data_dim[0] < MAX_SIGNAL_LENGTH:
            data_dict = tjiidata.lectur(self.shot, self.senal, data_dim[0],
                                        data_dim[0], data_dim[1])
        else:
            raise ValueError('Not loading data to avoid segmentation fault in tjiidata.lectur')
        ch = Channel(self.senal, Coords('dummy', (0, 0, 0)))

        if self.invert == 'true':  #yuk - TODO: use boolean type from config
            s = Signal(-np.array(data_dict['y']))
        else:
            s = Signal(np.array(data_dict['y']))

        output_data = TimeseriesData(timebase=Timebase(data_dict['x']),
                                     signal=s,
                                     channels=ch)
        output_data.meta.update({'shot': self.shot})
        return output_data
Example #23
    def do_fetch(self):
        chan_name = (self.diag_name.split('-'))[-1]  # remove -
        filename_dict = {'shot':self.shot, # goes with Boyd's local stg
                         'config_name':self.config_name}

        #filename_dict = {'diag_name':self.diag_name, # goes with retrieve names
        #                 'channel_number':self.channel_number,
        #                 'shot':self.shot}

        debug_(pf.DEBUG, 4, key='local_fetch')
        for each_path in pf.config.get('global', 'localdatapath').split('+'):
            self.basename = path.join(each_path, data_filename %filename_dict)
    
            files_exist = path.exists(self.basename)
            if files_exist: break

        if not files_exist:
            raise Exception("file {fn} not found. (localdatapath was {p})"
                            .format(fn=self.basename, 
                                    p=pf.config.get('global', 
                                                    'localdatapath').split('+')))
        else:
            signal_dict = newload(self.basename)
            
        if ((chan_name == array(['MP5','HMP13','HMP05'])).any()):  
            flip = -1.
            print('flip')
        else: flip = 1.
        if self.diag_name[0]=='-': flip = -flip
#        coords = get_coords_for_channel(**self.__dict__)
        ch = Channel(self.diag_name,  Coords('dummy', (0,0,0)))
        output_data = TimeseriesData(timebase=Timebase(signal_dict['timebase']),
                                 signal=Signal(flip*signal_dict['signal']), channels=ch)
        # bdb - used "fetcher" instead of "self" in the "direct from LHD data" version
        output_data.config_name = self.config_name  # when using saved files, same as name
        output_data.meta.update({'shot':self.shot})

        return output_data
Example #24
 def test_add_coords(self):
     dummy_coords = Coords('cylindrical',(1.0,1.0,1.0))
     self.assertEqual(dummy_coords.cylindrical, (1.0,1.0,1.0))
     dummy_coords.add_coords(cartesian=(0.1,0.5,0.2))
     self.assertEqual(dummy_coords.cartesian, (0.1,0.5,0.2))
Example #25
 def test_channel_class(self):
     test_coords = Coords('cylindrical',(0.0,0.0,0.0))
     test_ch = Channel('test_channel', test_coords)
     self.assertEqual(test_ch.name, 'test_channel')
     self.assertEqual(test_ch.coords, test_coords)
Example #26
import bz2
from pyfusion.acquisition.base import BaseDataFetcher
from pyfusion.data.timeseries import Signal, Timebase, TimeseriesData
from pyfusion.data.base import Coords, Channel, ChannelList

import numpy as np
#####

# Generate generic channel with dummy coordinates.
generic_ch = lambda x: Channel('channel_%03d' %
                               (x + 1), Coords('dummy', (x, 0, 0)))

named_ch = lambda x: Channel(x, Coords('dummy', (x, 0, 0)))


class BinaryMultiChannelTimeseriesFetcher(BaseDataFetcher):
    """Fetch binary data from specified filename.

    
    This data fetcher uses two configuration parameters: filename (required) and a dtype specification.

    The filename parameter can include a substitution string ``(shot)`` which will be replaced with the shot number.

    dtype will be evaluated as a string; numpy types can be used via the np
    namespace, e.g. np.float32.

    """
    def read_dtype(self):
        dtype = self.__dict__.get("dtype", None)
        return eval(dtype)
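The docstring above describes two behaviours: the (shot) substitution in the filename parameter and evaluation of the dtype string with numpy available as np. A minimal sketch of both, using hypothetical values rather than a real configuration file:

# Hypothetical values; this only mirrors read_dtype()'s eval and the (shot) substitution.
import numpy as np

dtype_str = 'np.float32'                       # as it might appear in the config
dtype = eval(dtype_str)                        # what read_dtype() would return
filename_template = '/data/(shot)/signal.bin'  # hypothetical filename parameter
shot = 12345
filename = filename_template.replace('(shot)', str(shot))
print(filename, dtype)                         # /data/12345/signal.bin <class 'numpy.float32'>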
Example #27
"""Data fetcher class for delimiter-separated value (DSV) data."""

from pyfusion.acquisition.base import BaseDataFetcher
from pyfusion.data.timeseries import Signal, Timebase, TimeseriesData
from pyfusion.data.base import Coords, Channel, ChannelList

from numpy import genfromtxt

# Generate generic channel with dummy coordinates.
generic_ch = lambda x: Channel('channel_%03d' %
                               (x + 1), Coords('dummy', (x, 0, 0)))


class DSVMultiChannelTimeseriesFetcher(BaseDataFetcher):
    """Fetch DSV data from specified filename.

    
    This data fetcher uses two configuration parameters: filename (required) and delimiter (optional).

    The filename parameter can include a substitution string ``(shot)`` which will be replaced with the shot number.

    By default, whitespace is used as the delimiter character (if the delimiter parameter is not provided).
    """
    def do_fetch(self):
        delimiter = self.__dict__.get("delimiter", None)
        data = genfromtxt(self.filename.replace("(shot)", str(self.shot)),
                          unpack=True,
                          delimiter=delimiter)

        # len(data) is number of channels + 1 (timebase)
        n_channels = len(data) - 1
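The snippet above stops after computing n_channels. A plausible continuation, shown only as a sketch rather than the library's actual do_fetch body, would treat row 0 of the genfromtxt result as the timebase and the remaining rows as channel signals, reusing the generic_ch helper defined above:

# Sketch only: assumes `data` and `n_channels` from the code above; `shot` stands in for self.shot.
timebase = Timebase(data[0])                  # first row holds the timebase
signal = Signal(data[1:])                     # one remaining row per channel
channels = ChannelList(*(generic_ch(i) for i in range(n_channels)))
output_data = TimeseriesData(timebase=timebase, signal=signal,
                             channels=channels)
output_data.meta.update({'shot': shot})       # self.shot inside the fetcher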
Example #28
def get_basic_diagnostics(diags=None,
                          shot=54196,
                          times=None,
                          delay=None,
                          exception=False,
                          debug=0):
    """ return a list of np.arrays of normally numeric values for the 
    times given, for the given shot.
    Will access server if env('IGETFILE') points to an exe, else accesses cache
    """

    global HJ_summary
    # if no exception given and we are not debugging
    # note - exception=None is a valid entry, meaning tolerate no exceptions
    # so the "default" we use is False
    if exception == False and debug == 0: exception = Exception

    if diags is None: diags = "<n_e19>,b_0,i_p,w_p,dw_pdt,dw_pdt2".split(',')
    if len(np.shape(diags)) == 0: diags = [diags]
    # LHD only    if delay is None: delay = get_delay(shot)

    if times is None:
        times = np.linspace(0, 4, 4000)

    times = np.array(times)
    vals = {}
    # create an extra time array to allow a cross check
    vals.update({'check_tm': times})
    vals.update({'check_shot': np.zeros(len(times), dtype=int) + shot})
    debug_(pyfusion.DEBUG, 2, key='get_basic')
    for diag in diags:
        if not (diag in file_info):
            warn('diagnostic {0} not found in shot {1}'.format(diag, shot),
                 stacklevel=2)
            vals.update({diag: np.nan + times})
            debug_(pyfusion.DEBUG, 2, key='get_basic')
        else:
            info = file_info[diag]
            varname = info['name']
            infofmt = info['format']
            subfolder = infofmt.split('@')[0]
            filepath = os.path.sep.join(
                [localigetfilepath, subfolder, infofmt])
            if ':' in varname: (oper, varname) = varname.split(':')
            else: oper = None

            if '(' in varname:
                try:
                    left, right = varname.split('(')
                    varname, rest = right.split(')')
                except:
                    raise ValueError(
                        'in expression {v} - parens?'.format(v=varname))
            if infofmt.find('.npz') > 0:
                try:
                    test = HJ_summary.keys()
                except:
                    csvfilename = acq_HJ + '/' + infofmt
                    if pyfusion.DBG() > 1:
                        print('looking for HeliotronJ summary in ' +
                              csvfilename)
                    print('reloading {0}'.format(csvfilename))
                    HJ_summary = np.load(csvfilename)

                val = HJ_summary[varname][shot]
                valarr = np.double(val) + (times * 0)
            elif 'get_static_params' in infofmt:
                pdicts = eval(infofmt.format(shot=shot))
                if len(pdicts) == 0:
                    print('empty dictionary returned')

                val = pdicts[varname]
                valarr = np.double(val) + (times * 0)
            else:  # read signal from data system
                debug_(max(pyfusion.DEBUG, debug), level=4, key='find_data')
                try:
                    #get HJparams
                    channel = info['name']
                    outdata = np.zeros(1024 * 2 * 256 + 1)
                    channel_length = (len(outdata) - 1) // 2
                    # outdfile only needed for opt=1 (get data via temp file)
                    # with tempfile.NamedTemporaryFile(prefix="pyfusion_") as outdfile:
                    ierror, getrets = gethjdata.gethjdata(shot,
                                                          channel_length,
                                                          info['name'],
                                                          verbose=VERBOSE,
                                                          opt=1,
                                                          ierror=2,
                                                          outdata=outdata,
                                                          outname='')

                    if ierror != 0:
                        raise LookupError('data not found for {s}:{c}'.format(
                            s=shot, c=channel))
                    ch = Channel(info['name'], Coords('dummy', (0, 0, 0)))
                    # timebase in secs (was ms in raw data)
                    dg = TimeseriesData(timebase=Timebase(1e-3 *
                                                          getrets[1::2]),
                                        signal=Signal(getrets[2::2]),
                                        channels=ch)
                except exception as reason:
                    if debug > 0:
                        print('exception running gethjdata {r} {a}'
                              .format(r=reason, a=reason.args))
                    dg = None
                    #break  # give up and try next diagnostic
                if dg is None:  # messy - break doesn't do what I want?
                    valarr = None
                else:
                    nd = 1  # initially only deal with single channels (HJ)
                    # get the column(s) of the array corresponding to the name
                    w = [0]
                    if (oper in 'sum,average,rms,max,min'.split(',')):
                        if oper == 'sum': op = np.sum
                        elif oper == 'average': op = np.average
                        elif oper == 'min': op = np.min
                        elif oper == 'std': op = np.std
                        else:
                            raise ValueError(
                                'operator {o} in {n} not known to get_basic_diagnostics'
                                .format(o=oper, n=info['name']))
                        # valarr = op(dg.data[:,nd+w],1)
                        valarr = op(dg.data[:, nd + w], 1)
                    else:
                        if len(w) != 1:
                            raise LookupError(
                                'Need just one instance of variable {0} in {1}'
                                .format(varname, dg.filename))
                        dg.data = dg.signal  # fudge compatibility
                        if len(np.shape(dg.data)) != 1:  # 2 for igetfile
                            raise LookupError(
                                'insufficient data for {0} in {1}'.format(
                                    varname, dg.filename))

                        #valarr = dg.data[:,nd+w[0]]

                    #tim =  dg.data[:,0] - delay
                    valarr = dg.signal
                    tim = dg.timebase

                    # fudge until we can get the number of points
                    valarr = valarr[:np.argmax(tim)]
                    tim = tim[:np.argmax(tim)]

                    if oper == 'ddt':  # derivative operator
                        valarr = np.diff(valarr) / (np.average(np.diff(tim)))
                        tim = (tim[0:-1] + tim[1:]) / 2.0

                    if oper == 'ddt2':  # abs(ddw)*derivative operator
                        dw = np.diff(valarr) / (np.average(np.diff(tim)))
                        ddw = np.diff(dw) / (np.average(np.diff(tim)))
                        tim = tim[2:]
                        valarr = 4e-6 * dw[1:] * np.abs(ddw)

                    if (len(tim) < 10) or (np.std(tim) < 0.1):
                        raise ValueError('Insufficient points or degenerate '
                                         'timebase data in {0}, {1}'.format(
                                             varname, dg.filename))

                    valarr = (stineman_interp(times, tim, valarr))
                    w = np.where(times > max(tim))
                    valarr[w] = np.nan

            if valarr is not None: vals.update({diag: valarr})
    debug_(max(pyfusion.DEBUG, debug), level=5, key='interp')
    return (vals)
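A minimal usage sketch, relying only on the defaults visible in the signature above; whether the underlying HeliotronJ data is reachable depends on the local setup.

# Sketch: request two of the default diagnostics for the default shot.
vals = get_basic_diagnostics(diags=['w_p', 'b_0'], shot=54196,
                             times=np.linspace(0, 4, 4000))
print(vals['check_shot'][0])   # 54196, from the cross-check array added above
print(vals['w_p'][:5])         # w_p interpolated onto the requested times (NaN where missing)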
Example #29
    def test_d_phase(self):
        channel_01 = Channel('channel_01', Coords('dummy', (0,0,0)))
        channel_02 = Channel('channel_02', Coords('dummy', (0,0,0)))

        fd = FloatDelta(channel_01, channel_02, 0.45)
Example #30
def get_n_channels(n_ch):
    """Return a list of n_ch channels."""
    poloidal_coords = 2*np.pi*np.arange(n_ch)/n_ch
    channel_gen = (Channel('ch_%02d' %i, Coords('cylindrical', (1.0,i,0.0)))
                   for i in poloidal_coords)
    return ChannelList(*channel_gen)
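A short usage sketch; the channel names and coordinates follow directly from the generator expression above.

# Four probes equally spaced in poloidal angle (radians).
channels = get_n_channels(4)
for ch in channels:
    print(ch.name, ch.coords.cylindrical)   # first probe: ch_00 (1.0, 0.0, 0.0)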