    def test_retrieve_merge(self):
        # Only works if the testserver.py server is running
        now = datetime.datetime.now()
        end = now
        start = end - datetime.timedelta(minutes=10)

        data = api.get_data(
            channels=["A", "B"],
            start=start,
            end=end,
            base_url="http://localhost:8080/archivertestdatamerge")
        print(data)

        # The test server returns 10 datapoints per channel with values 0 to 9;
        # merging the timestamps of the two channels yields 20 rows.
        self.assertEqual(data.shape[0], 20)

        # Channel A occupies the even rows; the odd rows are NaN fill
        # introduced by merging with channel B's timestamps.
        counter = 0
        for i in range(20):
            if i % 2 == 0:
                self.assertEqual(data["A"][i], counter)
                counter += 1
            else:
                self.assertTrue(math.isnan(data["A"][i]))

        print(data["A"])

    def test_real_raw(self):
        # Only works if archiver is accessible and data is available for the used channels
        # Retrieve data from the archiver

        now = datetime.datetime.now()
        end = now - datetime.timedelta(minutes=1)
        start = end - datetime.timedelta(minutes=1)

        data = api.get_data(
            channels=[
                # 'sf-archiverappliance/S10CB02-CVME-ILK:CENTRAL-CORETEMP',
                # 'sf-archiverappliance/S10CB02-CVME-ILK:CENTRAL-CORETEMP2',
                'sf-databuffer/S10CB01-RKLY-DCP10:FOR-AMPLT-MAX',
                'sf-databuffer/S10CB01-RKLY-DCP10:REF-AMPLT-MAX',
                # 'sf-archiverappliance/S10CB01-CVME-ILK:P2020-CORETEMP'
            ],
            start=start,
            end=end,
            mapping_function=lambda d, **kwargs: d,  # identity mapping
            server_side_mapping=True,
            server_side_mapping_strategy="fill-null")

        print(data)
        self.assertTrue(True)  # smoke test: passes if the request above did not raise
Example #3
    def get_data_time_range(self,
                            channels=None,
                            start=None,
                            end=None,
                            plot=False):
        # Avoid a mutable default argument
        channels = channels if channels is not None else []
        if not end:
            end = datetime.datetime.now()
            # A relative start is resolved against "now": it may be a timedelta,
            # a dict of timedelta kwargs, or a number of seconds. The offset is
            # added to the end, so it must be negative to reach into the past.
            if isinstance(start, datetime.timedelta):
                start = end + start
            elif isinstance(start, dict):
                start = end + datetime.timedelta(**start)
            elif isinstance(start, Number):
                start = end + datetime.timedelta(seconds=start)

        data = get_data(channels, start=start, end=end, range_type="time")
        if plot:
            ah = plt.gca()
            for chan in channels:
                # Plot only the points where this channel has data
                sel = ~np.isnan(data[chan])
                x = data.index[sel]
                y = data[chan][sel]
                ah.step(x, y, ".-", label=chan, where="post")
            # Figure-level formatting belongs outside the per-channel loop
            plt.xticks(rotation=30)
            plt.legend()
            plt.tight_layout()
            plt.xlabel(data.index.name)
        return data
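
A hypothetical call site for the three relative-start forms above (the daq instance and channel name are assumptions, not from the source); note that the offset must be negative to look into the past:

data = daq.get_data_time_range(channels=["CH:A"], start=datetime.timedelta(minutes=-10))
data = daq.get_data_time_range(channels=["CH:A"], start=dict(minutes=-10))
data = daq.get_data_time_range(channels=["CH:A"], start=-600)  # seconds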
Example #4
    def test(self):
        now = datetime.datetime.now()
        end = now
        start = end - datetime.timedelta(minutes=10)

        data = api.get_data(channels=["A", "B"],
                            start=start,
                            end=end,
                            base_url="http://localhost:8080/archivertestdata")

        api.to_hdf5(data,
                    filename=self.fname,
                    overwrite=False,
                    compression="gzip",
                    compression_opts=5,
                    shuffle=True)
        data_readback = api.from_hdf5(self.fname, index_field="globalSeconds")

        # Set the index to 'globalSeconds' - this drops the previous index 'globalDate'
        data.set_index("globalSeconds", inplace=True)
        # Reorder the readback columns to match the original data frame
        data_readback = data_readback[data.columns.tolist()]

        print(data.head())
        print(data_readback.head())

        self.assertTrue((data_readback.dropna() == data.dropna()).all().all())
from datetime import datetime, timedelta


def dapi_get(channels, start=None, end=None):
    # start is an offset relative to now; end is a duration relative to start
    # (both given as timedelta kwargs)
    start = start if start is not None else dict(seconds=0)
    end = end if end is not None else dict(seconds=1)

    start_time_delta = timedelta(**start)
    end_time_delta = timedelta(**end)

    now = datetime.now()
    start = now + start_time_delta
    end = start + end_time_delta

    return dapi.get_data(channels=channels, start=start, end=end)
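
A hypothetical usage (the channel name is assumed): fetch ten seconds of data starting ten seconds ago:

data = dapi_get(["CH:A"], start=dict(seconds=-10), end=dict(seconds=10))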
    def test_real_aggregation(self):
        # Aggregation is assumed to be imported from the data_api package
        now = datetime.datetime.now() - datetime.timedelta(hours=10)
        data = api.get_data([
            "SINDI01-RIQM-DCP10:FOR-PHASE-AVG",
            "S10CB01-RBOC-DCP10:FOR-PHASE-AVG"
        ],
                            start=now,
                            delta_range=100,
                            index_field="pulseId",
                            aggregation=Aggregation(nr_of_bins=100))

        # One row per requested aggregation bin
        self.assertEqual(data.shape[0], 100)
        print(data)
Example #7
    def db(
        self,
        channel_list=None,
        start_time_delta=None,
        end_time_delta=None,
        default_path=True,  # accepted but unused here
    ):
        # Avoid mutable default arguments
        start_time_delta = start_time_delta or {}
        end_time_delta = end_time_delta or {}
        if not channel_list:
            print("No channels specified, using default list '%s' instead." %
                  list(self._default_channel_list.keys())[0])
            channel_list = self._default_channel_list[list(
                self._default_channel_list.keys())[0]]
        now = datetime.datetime.now()
        end = now - datetime.timedelta(**end_time_delta)
        start = end - datetime.timedelta(**start_time_delta)
        return api.get_data(channels=channel_list, start=start, end=end)
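
A hypothetical call (the channel name is assumed): fetch the last five minutes of data, ending now:

data = self.db(channel_list=["CH:A"], start_time_delta=dict(minutes=5))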
    def test_real(self):
        # Only works if archiver is accessible and data is available for the used channels
        # Retrieve data from the archiver

        now = datetime.datetime.now()
        end = now - datetime.timedelta(minutes=1)
        start = end - datetime.timedelta(hours=12)

        data = api.get_data(channels=[
            'sf-archiverappliance/S10CB02-CVME-ILK:CENTRAL-CORETEMP',
            'sf-archiverappliance/S10CB02-CVME-ILK:CENTRAL-CORETEMP2'
        ],
                            start=start,
                            end=end)

        print(data)
        self.assertTrue(True)  # smoke test: passes if the request above did not raise

    def test_retrieve(self):
        # Only works if the testserver.py server is running
        now = datetime.datetime.now()
        end = now
        start = end - datetime.timedelta(minutes=10)

        data = api.get_data(channels=["A", "B"],
                            start=start,
                            end=end,
                            base_url="http://localhost:8080/archivertestdata")
        print(data)

        # The test server returns 10 datapoints with values from 0 to 9
        self.assertEqual(data.shape[0], 10)

        for i in range(10):
            self.assertEqual(data["A"][i], i)

        print(data["A"])
Example #10
    def get_data_pulse_id_range(self,
                                channels=None,
                                start=None,
                                end=None,
                                plot=False):
        # Avoid a mutable default argument
        channels = channels if channels is not None else []
        if not end:
            if hasattr(self, "pulse_id"):
                end = int(self.pulse_id.get_current_value())
            else:
                raise Exception("no end pulse id provided")
            # start is a (negative) offset relative to the end pulse id
            start = start + end
        data = get_data(channels, start=start, end=end, range_type="pulseId")
        if plot:
            ah = plt.gca()
            for chan in channels:
                # Plot only the points where this channel has data
                sel = ~np.isnan(data[chan])
                x = data.index[sel]
                y = data[chan][sel]
                # Axes.plot() does not accept a "where" keyword; step() does
                ah.step(x, y, ".-", label=chan, where="post")
            plt.tight_layout()

        return data
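
A hypothetical call (the channel name is assumed): fetch the last 1000 pulses up to the current pulse id; the start offset must be negative:

data = daq.get_data_pulse_id_range(channels=["CH:A"], start=-1000)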
Example #11
# Equilibrium-propagation style training: relax the network in a free phase,
# then in a weakly clamped phase, and train on the resulting loss
layers_free = free_phase(n_it_neg, epsilon)
_, loss = weakly_clamped_phase(layers_free, n_it_pos, epsilon, beta)

opt = tf.train.GradientDescentOptimizer(0.0001)
# opt = tf.train.AdamOptimizer(0.0001)

tvars = tf.trainable_variables()
grads = tf.gradients(loss, tvars)
train_step = opt.apply_gradients(zip(grads, tvars))


sess = tf.Session()
sess.run(tf.global_variables_initializer())


x_values, y_values = get_data()

n_batches_train = (4 * x_values.shape[0] // 5) // batch_size  # 80% train split
n_batches_valid = (1 * x_values.shape[0] // 5) // batch_size  # 20% validation split

layers_vals = [
    None,
    np.zeros((batch_size, hidden_size)),
    np.zeros((batch_size, output_size)),
]

for e in range(20):
    stat = []
    for index in range(n_batches_train):
        x_v = x_values[index * batch_size:(index + 1) * batch_size]
        y_v = y_values[index * batch_size:(index + 1) * batch_size]
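        # The source cuts off here; a plausible continuation under TF1
        # semantics would feed the batch and run the training op, e.g.
        # sess.run(train_step, feed_dict={x: x_v, y: y_v})
        # (the x and y placeholder names are assumptions, not from the source)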

Example #12

import data_api as api
import datetime
import pandas as pd

start = '2018-12-10 00:00:00.000'
end = '2018-12-18 00:00:00.000'
channels = ['ABK1:IST:2', 'MHC1:IST:2']
startsec = 1544396409.142
endsec = 1545087549.139
# standard method
data = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa')

# fixed time 1 hour interval, with padding
data1h = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa', fixed_time=True, fixed_time_interval='1 H')

# fixed time 1 day interval, with padding
data10 = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa', fixed_time=True, fixed_time_interval='1 D')

# same, padding each grid point with the previous raw value
data10back = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa', fixed_time=True, fixed_time_interval='1 D', interpolation_method='previous')

# same with linear interpolation
data10lin = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa', fixed_time=True, fixed_time_interval='1 D', interpolation_method='linear')

# same with nearest neighbour
data10nn = api.get_data(channels=channels, start=start, end=end, base_url='https://data-api.psi.ch/hipa', fixed_time=True, fixed_time_interval='1 D', interpolation_method='nearest')

# check with range_type = "globalSeconds"
datasec = api.get_data(channels=channels, start=startsec, end=endsec, base_url='https://data-api.psi.ch/hipa', range_type="globalSeconds")
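
For a quick sanity check of the padding strategies, the resampled frames could be compared side by side (a sketch; the column name is one of the channels requested above):

print(data10['ABK1:IST:2'].head())      # default padding
print(data10back['ABK1:IST:2'].head())  # interpolation_method='previous'
print(data10lin['ABK1:IST:2'].head())   # interpolation_method='linear'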
