Ejemplo n.º 1
0
def parse(report: str, issued: date = None) -> PirepData:
    """
    Returns a PirepData object based on the given report

    Returns None when the report string is empty
    """
    if not report:
        return None
    sanitized = sanitization.sanitize_report_string(report)
    # NOTE: will need to implement PIREP-specific list clean
    parsed = {
        "raw": report,
        "sanitized": sanitized,
        "station": None,
        "remarks": None,
    }
    fields = sanitized.split("/")
    # The first slash-delimited field holds the root report info
    parsed.update(_root(fields.pop(0).strip()))
    for field in fields:
        # Skip fields too short to carry a two-letter tag
        if len(field) < 2:
            continue
        tag, body = field[:2], field[2:].strip()
        if tag == "TM":
            parsed["time"] = _time(body, issued)
        else:
            try:
                key, handler = _HANDLERS[tag]
            except KeyError:
                if tag in _DICT_HANDLERS:
                    parsed.update(_DICT_HANDLERS[tag](body))
            else:
                parsed[key] = handler(body)
    return PirepData(**parsed)
Ejemplo n.º 2
0
 def test_sanitize_report_string(self):
     """
     Checks that common string-level report mistakes are corrected
     """
     raw = "KJFK 36010 ? TSFEW004SCT012FEW///CBBKN080 C A V O K A2992"
     expected = "KJFK 36010   TS FEW004 SCT012 FEW///CB BKN080 CAVOK A2992"
     result = sanitization.sanitize_report_string(raw)
     self.assertEqual(result, expected)
Ejemplo n.º 3
0
def parse(report: str, issued: date = None) -> AirepData:
    """
    Returns an AirepData object based on the given report

    Returns None when the report string is empty

    NOTE: AIREP parsing is not yet implemented. The report is
    sanitized and staged for parsing, but no AirepData is built yet.
    """
    if not report:
        return None
    clean = sanitization.sanitize_report_string(report)
    wxdata = sanitization.sanitize_report_list(clean.split())
    # Staged response payload for when element parsing is implemented
    wxresp = {"raw": report, "sanitized": " ".join(wxdata)}
    # TODO: parse wxdata elements into wxresp and return AirepData(**wxresp)
    return None
Ejemplo n.º 4
0
def sanitize(report: str) -> Tuple[str, str, List[str]]:
    """Returns a sanitized report, remarks, and elements ready for parsing"""
    # Clean the raw string, then separate the remarks section
    body, remark_str = get_remarks(sanitization.sanitize_report_string(report))
    # Remove duplicates and run list-level sanitization
    elements = sanitization.sanitize_report_list(core.dedupe(body))
    sanitized = " ".join(elements)
    if remark_str:
        sanitized = sanitized + " " + remark_str
    return sanitized, remark_str, elements
Ejemplo n.º 5
0
 def sanitize(report: str) -> str:
     """Sanitizes a PIREP string"""
     # Delegates directly to the shared string-level sanitizer
     cleaned = sanitization.sanitize_report_string(report)
     return cleaned
Ejemplo n.º 6
0
def parse(station: str, report: str, issued: date = None) -> (TafData, Units):
    """
    Returns TafData and Units dataclasses with parsed data and their associated units

    Returns (None, None) when the report string is empty.
    Raises from valid_station if the station identifier is invalid.
    """
    if not report:
        return None, None
    # Validate the station ident before doing any parsing work
    valid_station(station)
    # Strip leading report-type / amendment / correction prefixes
    while len(report) > 3 and report[:4] in ("TAF ", "AMD ", "COR "):
        report = report[4:]
    # Accumulator for TafData fields; filled in as sections are parsed
    ret = {
        "end_time": None,
        "raw": report,
        "remarks": None,
        "start_time": None
    }
    report = sanitization.sanitize_report_string(report)
    # Station and issue time are expected within the first ~20 characters
    _, station, time = core.get_station_and_time(report[:20].split())
    ret["station"] = station
    ret["time"] = core.make_timestamp(time, target_date=issued)
    # Remove the station and time tokens so they aren't re-parsed as forecast data
    report = report.replace(station, "")
    if time:
        report = report.replace(time, "").strip()
    # Choose North American vs International units based on the station
    if uses_na_format(station):
        use_na = True
        units = Units(**NA_UNITS)
    else:
        use_na = False
        units = Units(**IN_UNITS)
    # Find and remove remarks
    report, ret["remarks"] = get_taf_remarks(report)
    # Split and parse each line
    lines = split_taf(report)
    parsed_lines = parse_lines(lines, units, use_na, issued)
    # Perform additional info extract and corrections
    if parsed_lines:
        # Max/min temps are pulled from the last line first, falling back
        # to the first line if neither was found there
        (
            parsed_lines[-1]["other"],
            ret["max_temp"],
            ret["min_temp"],
        ) = get_temp_min_and_max(parsed_lines[-1]["other"])
        if not (ret["max_temp"] or ret["min_temp"]):
            (
                parsed_lines[0]["other"],
                ret["max_temp"],
                ret["min_temp"],
            ) = get_temp_min_and_max(parsed_lines[0]["other"])
        # Set start and end times based on the first line
        start, end = parsed_lines[0]["start_time"], parsed_lines[0]["end_time"]
        parsed_lines[0]["end_time"] = None
        ret["start_time"], ret["end_time"] = start, end
        parsed_lines = find_missing_taf_times(parsed_lines, start, end)
        parsed_lines = get_taf_flight_rules(parsed_lines)
    # Extract Oceania-specific data
    # NOTE(review): stations starting with "A" reach parsed_lines[-1] even if
    # parsed_lines is empty, which would raise IndexError — confirm upstream
    # guarantees at least one forecast line for these stations
    if ret["station"][0] == "A":
        (
            parsed_lines[-1]["other"],
            ret["alts"],
            ret["temps"],
        ) = get_oceania_temp_and_alt(parsed_lines[-1]["other"])
    # Convert wx codes
    for i, line in enumerate(parsed_lines):
        parsed_lines[i]["other"], parsed_lines[i]["wx_codes"] = get_wx_codes(
            line["other"])
    # Convert to dataclass
    ret["forecast"] = [TafLineData(**line) for line in parsed_lines]
    return TafData(**ret), units