Example #1
def save_data_iread(query, filename, base_url=None, collector=None):
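    """
    Save the raw (idread-encoded) events matching the given query.

    The server response is decoded with idread_util.decode and written to an
    HDF5 file via idread_util.HDF5Collector, unless a custom collector is
    passed in, in which case the data is handed to that collector instead and
    no file is opened or closed here.
    """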

    if base_url is None:
        base_url = default_base_url

    # Ensure that we request raw events
    # TODO TO BE REMOVED
    if "response" in query:
        # Overwrite whatever is in format
        query["response"]["format"] = "rawevent"
    else:
        query["response"] = util.construct_response(format="rawevent")

    # https://github.psi.ch/sf_daq/idread_specification#reference-implementation
    # https://github.psi.ch/sf_daq/ch.psi.daq.queryrest#rest-interface

    # curl command that can be used for debugging
    logger.info("curl -H \"Content-Type: application/json\" -X POST -d '"+json.dumps(query)+"' "+base_url + '/query')

    if collector is not None:
        serializer = collector
    else:
        serializer = idread_util.HDF5Collector()
        serializer.open(filename)

    stream = False
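    # Streaming decode is currently disabled: the full response body is
    # buffered and decoded from an in-memory BytesIO wrapper instead.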
    if stream:
        with requests.post(base_url + '/query', json=query, stream=stream) as response:
            idread_util.decode(response.raw, collector_function=serializer.add_data)
    else:
        response = requests.post(base_url + '/query', json=query)
        idread_util.decode(io.BytesIO(response.content), collector_function=serializer.add_data)

    if collector is None:
        serializer.close()
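
A minimal usage sketch (not part of the library): assuming the same module context, a query built with util.construct_data_query as in Example #4 could be saved to an HDF5 file like this; the channel name and output filename are placeholders.

import datetime

end = datetime.datetime.now()
start = end - datetime.timedelta(minutes=10)
query = util.construct_data_query(
    channels=["SINEG01-RCIR-PUP10:SIG-AMPLT"],  # placeholder channel
    start=start,
    end=end)
# Writes the decoded raw events to out.h5 via the default HDF5Collector
save_data_iread(query, "out.h5")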
Example #2
    def test_decode(self):
        out = self.data / 'out.bin'
        with out.open(mode='rb') as f:
            idread_util.decode(f)

        out2 = self.data / 'out_2.bin'
        with out2.open('rb') as f:
            idread_util.decode(f)

        self.assertTrue(True)
Example #3
    def test_decode_collector(self):
        collector = idread_util.DictionaryCollector()
        tmp = self.data / 'out.bin'

        with tmp.open('rb') as f:
            idread_util.decode(f, collector_function=collector.add_data)

        data = collector.get_data()
        print(len(data[0]["data"]))

        self.assertEqual(600, len(data[0]["data"]))
Example #4
    def test_request(self):
        # This test will fail if the production backend is not available or there is no data for the requested channel

        base_url = "https://data-api.psi.ch/sf"

        end = datetime.datetime.now()
        start = end - datetime.timedelta(minutes=10)
        query = util.construct_data_query(
            channels=["SINEG01-RCIR-PUP10:SIG-AMPLT"],
            start=start,
            end=end,
            response=util.construct_response(format="rawevent"))

        with requests.post(base_url + '/query', json=query,
                           stream=True) as response:
            idread_util.decode(response.raw)

        self.assertTrue(True)
Example #5
def get_data_idread(query, base_url=None):
    """
    Retrieve data in idread format
    :param query:
    :param base_url:
    :return:            The return format is like this
                        [{channel:{}, data:[{pulseId: , value: ...}]}, ]
    """

    # TODO remove and implement correct working
    # if "mapping" in query:
    #     raise RuntimeError("Server side mapping currently not supported with idread")

    supported_event_fields = ['value', 'time', 'timeRaw', 'pulseId', 'status', 'severity']
    # globalSeconds and iocSeconds need to be converted to string!
    # globalDate needs to be generated - remember to hard-code timezone Zurich!

    if "eventFields" in query:
        if not set(query["eventFields"]).issubset(supported_event_fields):
            raise ValueError("Requested event fields are not supported in raw mode. Supported event fields are: " +
                             " ".join(supported_event_fields))

        # Note: this conversion is currently moot, as the backend does not support it yet

        # convert eventFields into event fields the backend understands
        # supported event_fields by the backend are documented at
        # https://github.psi.ch/sf_daq/idread_specification
        requested_event_fields = query["eventFields"]
        backend_event_fields = []
        for field in requested_event_fields:
            if field == "time" or field == "timeRaw":  # need to convert
                if "globalDate" not in backend_event_fields:
                    backend_event_fields.append("globalDate")  # TODO need to change once supported by backend
            else:
                backend_event_fields.append(field)

        query = dict(query)  # copy the query dict so that the passed query can be reused
        query["eventFields"] = backend_event_fields

    if base_url is None:
        base_url = default_base_url

    # Ensure that we request raw events
    if "response" in query:
        # Overwrite whatever is in format
        query["response"]["format"] = "rawevent"
    else:
        query["response"] = util.construct_response(format="rawevent")

    # https://github.psi.ch/sf_daq/idread_specification#reference-implementation
    # https://github.psi.ch/sf_daq/ch.psi.daq.queryrest#rest-interface

    # curl command that can be used for debugging
    logger.info("curl -H \"Content-Type: application/json\" -X POST -d '"+json.dumps(query)+"' "+base_url + '/query')

    if "mapping" in query:
        collector = idread_util.MappingCollector(len(query["channels"]), event_fields=requested_event_fields)
    else:
        collector = idread_util.DictionaryCollector(event_fields=requested_event_fields)

    stream = False
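    # Streaming decode is currently disabled: the full response body is
    # buffered and decoded from an in-memory BytesIO wrapper instead.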
    if stream:
        with requests.post(base_url + '/query', json=query, stream=stream) as response:
            idread_util.decode(response.raw, collector_function=collector.add_data)
    else:
        response = requests.post(base_url + '/query', json=query)
        idread_util.decode(io.BytesIO(response.content), collector_function=collector.add_data)

    return collector.get_data()
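
A minimal sketch (assumed, not taken from the library) of consuming the structure returned by get_data_idread, following the format documented in its docstring; the query is assumed to be built as in Example #4.

data = get_data_idread(query)
for channel_entry in data:
    print(channel_entry["channel"], "->", len(channel_entry["data"]), "events")
    for event in channel_entry["data"]:
        pulse_id = event["pulseId"]
        value = event["value"]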