Example #1
def test_splittime() -> None:
    assert next(
        f"{t1}, {t2}, {h1}, {h2}"
        for t1, t2, h1, h2 in split_times(
            to_datetime("2018-01-12 12:00"),
            to_datetime("2018-01-12 14:00"),
            by=timedelta(minutes=30),
        )
    ) == (
        "2018-01-12 12:00:00+00:00, 2018-01-12 14:00:00+00:00, "
        "2018-01-12 12:00:00+00:00, 2018-01-12 12:30:00+00:00"
    )
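For reference, here is a minimal sketch of how the split_times generator exercised by this test might be called directly; it assumes split_times and to_datetime are importable from the module under test, exactly as in the assertion above.

from datetime import timedelta

# Assumption: split_times and to_datetime come from the module under test.
start = to_datetime("2018-01-12 12:00")
stop = to_datetime("2018-01-12 14:00")

# Each yielded tuple carries the overall bounds (t1, t2) together with one
# 30-minute window (h1, h2), as the assertion above suggests.
for t1, t2, h1, h2 in split_times(start, stop, by=timedelta(minutes=30)):
    print(f"{h1} -> {h2}")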
Example #2
def to_czml(
    traffic: Union[Traffic, SO6],
    filename: Union[str, Path],
    minimum_time: Optional[timelike] = None,
) -> None:
    """Generates a CesiumJS scenario file."""

    if isinstance(traffic, Traffic):
        if "baro_altitude" in traffic.data.columns:
            traffic = traffic.query("baro_altitude == baro_altitude")
        elif "altitude" in traffic.data.columns:
            traffic = traffic.query("altitude == altitude")

    if minimum_time is not None:
        minimum_time = to_datetime(minimum_time)
        traffic = cast(Traffic,
                       traffic.query(f"timestamp >= '{minimum_time}'"))

    if isinstance(filename, str):
        filename = Path(filename)

    if not filename.parent.exists():
        filename.parent.mkdir(parents=True)

    start = format_ts(traffic.start_time)
    availability = f"{start}/{format_ts(traffic.end_time)}"
    export = [{
        "id": "document",
        "name": f"Traffic_{start}",
        "version": "1.0",
        "author": getpass.getuser(),
        "clock": {
            "interval": availability,
            "currentTime": start,
            "multiplier": _CZML_Params.default_time_multiplier,
        },
    }]
    for flight in traffic:
        for elt in export_flight(flight):
            export.append(elt)

    with filename.open("w") as fh:
        json.dump(export, fh, indent=2)

    logging.info(f"Scenario file {filename} written")
Example #3
def to_bluesky(
    traffic: Traffic,
    filename: Union[str, Path],
    minimum_time: Optional[timelike] = None,
) -> None:
    """Generates a Bluesky scenario file."""

    if minimum_time is not None:
        minimum_time = to_datetime(minimum_time)
        traffic = traffic.query(f"timestamp >= '{minimum_time}'")

    if isinstance(filename, str):
        filename = Path(filename)

    if not filename.parent.exists():
        filename.parent.mkdir(parents=True)

    altitude = ("baro_altitude"
                if "baro_altitude" in traffic.data.columns else "altitude")

    if "mdl" not in traffic.data.columns:
        traffic = aircraft.merge(traffic)

    if "cas" not in traffic.data.columns:
        traffic = Traffic(
            traffic.data.assign(cas=vtas2cas(traffic.data.ground_speed,
                                             traffic.data[altitude])))

    with filename.open("w") as fh:
        t_delta = traffic.data.timestamp - traffic.start_time
        data = (
            traffic.assign_id()
            .data.groupby("flight_id")
            .filter(lambda x: x.shape[0] > 3)
            .assign(timedelta=t_delta.apply(fmt_timedelta))
            .sort_values(by="timestamp")
        )

        for column in data.columns:
            data[column] = data[column].astype(str)

        is_created: List[str] = []
        is_deleted: List[str] = []

        start_time = cast(pd.Timestamp, traffic.start_time).time()
        fh.write(f"00:00:00> TIME {start_time}\n")

        # Optionally add some BlueSky commands for the visualisation
        # fh.write("00:00:00>trail on\n")
        # fh.write("00:00:00>ssd conflicts\n")

        # Remove an aircraft once its last data point has been written
        map_icao24_last_point = (
            data.groupby("flight_id").timestamp.max().to_dict()
        )

        # Main loop to write lines in the scenario file
        for _, v in data.iterrows():
            if v.flight_id not in is_created:
                # If the object is not created then create it
                is_created.append(v.flight_id)
                fh.write(f"{v.timedelta}> CRE {v.callsign} {v.mdl} "
                         f"{v.latitude} {v.longitude} {v.track} "
                         f"{v[altitude]} {v.cas}\n")

            elif v.timestamp == map_icao24_last_point[v.flight_id]:
                # Delete the aircraft once its last data point is reached
                if v.flight_id not in is_deleted:
                    is_deleted.append(v.flight_id)
                    fh.write(f"{v.timedelta}> DEL {v.callsign}\n")

            elif v.flight_id not in is_deleted:
                # Otherwise update the object position
                fh.write(f"{v.timedelta}> MOVE {v.callsign} "
                         f"{v.latitude} {v.longitude} {v[altitude]} "
                         f"{v.track} {v.cas} {v.vertical_rate}\n")

        logging.info(f"Scenario file {filename} written")
def compute_stats(
    input_file: Path,
    output_file: Optional[Path],
    sector_list: List[str],
    max_workers: int,
    interval: int,
    starting_from: Optional[str],
    ending_at: Optional[str],
) -> pd.DataFrame:

    so6 = SO6.from_file(input_file.as_posix())
    if so6 is None:
        raise RuntimeError(f"Could not load SO6 data from {input_file}")

    total: List[Dict[str, int]] = []

    if starting_from is None:
        start_time = so6.data.time1.min()
    else:
        start_time = max(to_datetime(starting_from), so6.data.time1.min())
    if ending_at is None:
        end_time = so6.data.time2.max()
    else:
        end_time = min(to_datetime(ending_at), so6.data.time2.max())

    if end_time < start_time:
        msg = f"End time {end_time} is anterior to start time {start_time}"
        raise ValueError(msg)

    # First clip
    so6 = so6.between(start_time, end_time)

    delta = timedelta(minutes=interval)
    size_range = int((end_time - start_time) / delta) + 1
    time_list = [start_time + i * delta for i in range(size_range)]

    all_sectors = [nm_airspaces[airspace] for airspace in sector_list]
    so6 = so6.inside_bbox(
        cascaded_union([s.flatten() for s in all_sectors if s is not None])
    )

    for start_ in tqdm(time_list):
        subset = so6.between(start_, delta)
        args = {}
        # subset serializes cheaply because it is much smaller than so6,
        # so the full so6 is never shipped to the worker processes
        with ProcessPoolExecutor(max_workers=max_workers) as executor:
            tasks = {
                executor.submit(occupancy, subset, sector): sector.name
                for sector in all_sectors
                if sector is not None
            }
            for future in as_completed(tasks):
                conf = tasks[future]
                try:
                    args[conf] = future.result()
                except Exception as e:
                    print(f"Exception {e} raised on {conf}")
        total.append(args)

    stats = pd.DataFrame.from_dict(total)
    stats.index = time_list

    if output_file is not None:
        if output_file.suffix == ".pkl":
            stats.to_pickle(output_file.as_posix())
        elif output_file.suffix == ".csv":
            stats.to_csv(output_file.as_posix())
        elif output_file.suffix == ".xlsx":
            stats.to_excel(output_file.as_posix())
        else:
            print(stats)

    return stats
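A hypothetical invocation of compute_stats is sketched below; the file names and sector identifiers are placeholders, and the sector names must exist in nm_airspaces for the occupancy computation to run.

from pathlib import Path

# Hypothetical invocation; file names and sector identifiers are placeholders.
stats = compute_stats(
    input_file=Path("sample.so6"),
    output_file=Path("occupancy.csv"),  # .pkl, .csv and .xlsx are recognised
    sector_list=["LFBBBDX", "LFBBPT"],  # names looked up in nm_airspaces
    max_workers=4,
    interval=10,                        # minutes between two occupancy snapshots
    starting_from="2018-01-12 12:00",
    ending_at="2018-01-12 14:00",
)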