def _set_time_range(start_date,
                    end_date,
                    delta_time,
                    margin=0.0,
                    start_expansion=False,
                    end_expansion=False):
    if start_date is None and end_date is None:
        raise ValueError("Must select at least start or end")

    if start_date is not None and end_date is not None:
        start = utils.convert_date(start_date)
        end = utils.convert_date(end_date)
    elif start_date is not None:
        start = utils.convert_date(start_date)
        end = start + timedelta(seconds=delta_time)
    else:
        end = utils.convert_date(end_date)
        start = end - timedelta(seconds=delta_time)

    if margin != 0.0:
        interval = end - start
        start = start - margin * interval
        end = end + margin * interval

    return {
        "startDate": datetime.isoformat(start),
        "endDate": datetime.isoformat(end),
        "startExpansion": start_expansion,
        "endExpansion": end_expansion
    }
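# Minimal usage sketch for _set_time_range, assuming utils.convert_date accepts
# ISO-8601-like date strings (as the test below exercises); the date is a
# hypothetical value. With only a start date, the end is derived from
# delta_time, and the margin then widens the window on both sides.
def _example_set_time_range():
    time_range = _set_time_range(start_date="2019-01-01 08:00:00",
                                 end_date=None,
                                 delta_time=60,
                                 margin=0.1)
    # A one-minute window widened by 6 seconds (10%) on each side, serialized
    # to ISO format.
    print(time_range["startDate"], time_range["endDate"])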
    def test_convert_date(self):

        # Check if correct timezone information is attached
        date = utils.convert_date(
            "2017-12-15 15:05:43.258077+01:00")  # offset given explicitly, keep it
        self.assertEqual(date.utcoffset(), datetime.timedelta(hours=1))
        date = utils.convert_date(
            "2016-07-29 14:01")  # should get a +2 offset (summer time)
        self.assertEqual(date.utcoffset(), datetime.timedelta(hours=2))
        date = utils.convert_date(
            "2018-11-14 11:17:38.362582")  # should get a +1 offset (winter time)
        self.assertEqual(date.utcoffset(), datetime.timedelta(hours=1))
Example #3
def get_timestamp_from_pulse_id(
        pulse_ids,
        mapping_channel="SIN-CVME-TIFGUN-EVR0:BUNCH-1-OK",
        base_url=None):
    """
    Get the global timestamp for given pulse-ids

    :param pulse_ids:           pulse-id or list of pulse-ids to retrieve the global timestamp for
    :param mapping_channel:     channel used to determine the pulse-id <-> timestamp mapping
    :param base_url:            base URL of the retrieval API
    :return:                    list of corresponding global timestamps
    """
    if not isinstance(pulse_ids, list):
        pulse_ids = [pulse_ids]

    dates = []
    for pulse_id in pulse_ids:
        # retrieve raw data - data object needs to contain one object for the channel with one data element
        query = util.construct_data_query(mapping_channel,
                                          start=pulse_id,
                                          range_type="pulseId",
                                          event_fields=["pulseId", "time"])
        data = get_data_json(query, base_url=base_url)

        if pulse_id != data[0]["data"][0]["pulseId"]:
            raise RuntimeError("Unable to retrieve mapping")

        dates.append(utils.convert_date(data[0]["data"][0]["time"]))

    if len(pulse_ids) != len(dates):
        raise RuntimeError("Unable to retrieve mapping")

    return dates
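# Minimal usage sketch for get_timestamp_from_pulse_id; the pulse-ids and the
# base_url are hypothetical placeholders, and the default mapping channel from
# the signature above is used.
def _example_get_timestamp_from_pulse_id():
    pulse_ids = [5721370000, 5721370010]
    timestamps = get_timestamp_from_pulse_id(pulse_ids,
                                             base_url="http://localhost:8080")
    # One datetime per requested pulse-id, in the same order.
    for pulse_id, timestamp in zip(pulse_ids, timestamps):
        print(pulse_id, timestamp.isoformat())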
Example #4
def calculate_time_range(start_date, end_date, delta_time):
    """
    Calculate the start/end range based on the given start, end and/or delta parameters. This method accepts date
    strings and returns datetime objects.

    :param start_date:  start date
    :param end_date:    end date
    :param delta_time:  delta time in seconds
    :return: start, end tuple
    """
    if start_date is None and end_date is None:
        raise ValueError("Must select at least start or end")

    if start_date is not None and end_date is not None:
        start = utils.convert_date(start_date)
        end = utils.convert_date(end_date)
    elif start_date is not None:
        start = utils.convert_date(start_date)
        end = start + timedelta(seconds=delta_time)
    else:
        end = utils.convert_date(end_date)
        start = end - timedelta(seconds=delta_time)

    return start, end
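# Minimal usage sketch for calculate_time_range (the end date is hypothetical;
# timedelta is the same import the functions above already rely on): with only
# an end date given, the start is derived by subtracting delta_time seconds.
def _example_calculate_time_range():
    start, end = calculate_time_range(start_date=None,
                                      end_date="2018-11-14 11:17:38",
                                      delta_time=600)
    assert end - start == timedelta(seconds=600)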
def get_global_date(pulse_ids,
                    mapping_channel="SIN-CVME-TIFGUN-EVR0:BUNCH-1-OK",
                    base_url=default_base_url):
    if not isinstance(pulse_ids, list):
        pulse_ids = [pulse_ids]

    dates = []
    for pulse_id in pulse_ids:
        # retrieve raw data - data object needs to contain one object for the channel with one data element
        data = get_data(mapping_channel,
                        start=pulse_id,
                        range_type="pulseId",
                        mapping_function=lambda d, **kwargs: d,
                        base_url=base_url)
        if pulse_id != data[0]["data"][0]["pulseId"]:
            raise RuntimeError("Unable to retrieve mapping")

        dates.append(utils.convert_date(data[0]["data"][0]["globalDate"]))

    if len(pulse_ids) != len(dates):
        raise RuntimeError("Unable to retrieve mapping")

    return dates
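# Minimal usage sketch for get_global_date (the pulse-id is hypothetical): a
# single pulse-id is accepted as well and still yields a one-element list of
# datetime objects.
def _example_get_global_date():
    dates = get_global_date(5721370000)
    print(dates[0].isoformat())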
def cli():
    import argparse

    time_end = datetime.now()
    time_start = time_end - timedelta(minutes=1)

    parser = argparse.ArgumentParser(
        description='Command line interface for the Data API')
    parser.add_argument(
        'action',
        type=str,
        default="",
        help='Action to be performed. Possibilities: search, save')
    parser.add_argument("--regex",
                        type=str,
                        help="String to be searched",
                        default="")
    parser.add_argument("--index-field",
                        type=str,
                        help="field the data is indexed on",
                        default="pulseId",
                        choices=["globalDate", "globalSeconds", "pulseId"])
    parser.add_argument("--from_time",
                        type=str,
                        help="Start time for the data query",
                        default=time_start)
    parser.add_argument("--to_time",
                        type=str,
                        help="End time for the data query",
                        default=time_end)
    parser.add_argument("--from_pulse",
                        type=str,
                        help="Start pulseId for the data query",
                        default=-1)
    parser.add_argument("--to_pulse",
                        type=str,
                        help="End pulseId for the data query",
                        default=-1)
    parser.add_argument("--channels",
                        type=str,
                        help="Channels to be queried, comma-separated list",
                        default="")
    parser.add_argument("--filename",
                        type=str,
                        help="Name of the output file",
                        default="")
    parser.add_argument("--url",
                        type=str,
                        help="Base URL of retrieval API",
                        default=default_base_url)
    parser.add_argument("--overwrite",
                        action="store_true",
                        help="Overwrite the output file",
                        default="")
    # parser.add_argument("--split", action="store_true", help="Split output file", default="")
    parser.add_argument("--split",
                        type=str,
                        help="Number of pulses or duration (ISO8601) per file",
                        default="")
    parser.add_argument(
        "--print",
        help="Prints out the downloaded data. Output can be cut.",
        action="store_true")
    parser.add_argument("--binary",
                        help="Download as binary",
                        action="store_true",
                        default=False)
    parser.add_argument("--start_expansion",
                        help="Expand query to next point before start",
                        action="store_true",
                        default=False)
    parser.add_argument("--end_expansion",
                        help="Expand query to next point after end",
                        action="store_true",
                        default=False)

    args = parser.parse_args()

    split = args.split
    filename = args.filename
    api_base_url = args.url
    binary_download = args.binary
    start_expansion = args.start_expansion
    end_expansion = args.end_expansion
    index_field = args.index_field

    # Check if output files already exist
    if not args.overwrite and filename != "":
        import os.path
        if os.path.isfile(filename):
            logger.error("File %s already exists" % filename)
            return

        n_filename = "%s_%03d.h5" % (re.sub(r"\.h5$", "", filename), 0)
        if os.path.isfile(n_filename):
            logger.error("File %s already exists" % n_filename)
            return

    data = None
    if args.action == "search":
        if args.regex == "":
            logger.error("Please specify a regular expression with --regex")
            parser.print_help()
            return
        # pprint.pprint(search(args.regex, backends=["sf-databuffer", "sf-archiverappliance"], base_url=args.url))
        print(
            json.dumps(search(args.regex, backends=None, base_url=args.url),
                       indent=4))
    elif args.action == "save":
        if args.filename == "" and not args.print:
            logger.warning("Please select either --print or --filename")
            parser.print_help()
            return
        if args.from_pulse != -1:
            if args.to_pulse == -1:
                logger.error("Please set a range limit with --to_pulse")
                return

            start_pulse = int(args.from_pulse)
            file_counter = 0

            # Fetch the requested pulse range in chunks: when both --split and
            # --filename are given, each pass covers at most `split` pulses and
            # is written to a separately numbered file.
            while True:

                end_pulse = int(args.to_pulse)

                if start_pulse == end_pulse:
                    break

                if split != "" and filename != "" and (
                        end_pulse - start_pulse) > int(split):
                    end_pulse = start_pulse + int(split)

                if filename != "":
                    if split != "":
                        new_filename = re.sub(r"\.h5$", "", filename)
                        new_filename = "%s_%03d.h5" % (new_filename,
                                                       file_counter)
                    else:
                        new_filename = filename

                if binary_download:
                    get_data_iread(args.channels.split(","),
                                   start=start_pulse,
                                   end=end_pulse,
                                   range_type="pulseId",
                                   index_field=index_field,
                                   filename=new_filename,
                                   base_url=api_base_url,
                                   start_expansion=start_expansion,
                                   end_expansion=end_expansion)

                else:
                    data = get_data(args.channels.split(","),
                                    start=start_pulse,
                                    end=end_pulse,
                                    range_type="pulseId",
                                    index_field=index_field,
                                    base_url=api_base_url,
                                    start_expansion=start_expansion,
                                    end_expansion=end_expansion)

                    if data is not None:
                        if filename != "":
                            to_hdf5(data,
                                    filename=new_filename,
                                    overwrite=args.overwrite)
                        elif args.print:
                            print(data)
                        else:
                            logger.warning(
                                "Please select either --print or --filename")
                            parser.print_help()

                start_pulse = end_pulse
                file_counter += 1
        else:
            start_time = utils.convert_date(args.from_time)
            file_counter = 0

            # Fetch the requested time range in chunks: when both --split and
            # --filename are given, each pass covers at most the ISO-8601
            # duration given by --split and is written to a separately numbered
            # file.
            while True:

                end_time = utils.convert_date(args.to_time)

                if start_time == end_time:
                    break

                if split != "" and filename != "" and (
                        end_time - start_time) > parse_duration(split):
                    end_time = start_time + parse_duration(split)

                if filename != "":
                    if split != "":
                        new_filename = re.sub(r"\.h5$", "", filename)
                        new_filename = "%s_%03d.h5" % (new_filename,
                                                       file_counter)
                    else:
                        new_filename = filename

                if binary_download:
                    get_data_iread(args.channels.split(","),
                                   start=start_time,
                                   end=end_time,
                                   range_type="globalDate",
                                   index_field=index_field,
                                   filename=new_filename,
                                   base_url=api_base_url,
                                   start_expansion=start_expansion,
                                   end_expansion=end_expansion)

                else:
                    data = get_data(args.channels.split(","),
                                    start=start_time,
                                    end=end_time,
                                    range_type="globalDate",
                                    index_field=index_field,
                                    base_url=api_base_url,
                                    start_expansion=start_expansion,
                                    end_expansion=end_expansion)

                    if data is not None:

                        if filename != "":
                            to_hdf5(data,
                                    filename=new_filename,
                                    overwrite=args.overwrite)
                        elif args.print:
                            print(data)
                        else:
                            logger.warning(
                                "Please select either --print or --filename")
                            parser.print_help()

                start_time = end_time
                file_counter += 1
    else:
        parser.print_help()
        return
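# Example invocations (the executable name "data_api" and the channel, time and
# pulse-id values are assumptions, shown only to illustrate the flags parsed
# above):
#
#   data_api search --regex "SIN-CVME.*"
#   data_api save --channels CH_A,CH_B --from_time "2018-11-14 11:00" \
#       --to_time "2018-11-14 11:01" --filename out.h5
#   data_api save --channels CH_A --from_pulse 5721370000 \
#       --to_pulse 5721371000 --filename out.h5 --split 100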