def main(filename):
    """Load and process raw dump1090 ADS-B capture files.

    *filename* may point at a single file or at a directory; every file
    found is passed to ``load_dump1090_file``.

    :param filename: The path to the folder/file which contains the RAW ADS-B.
    """
    # BUG FIX: the original tested the unrelated name ``args.file`` here
    # instead of the ``filename`` parameter it was actually given, so the
    # function's behaviour depended on a global that may not even exist.
    if os.path.isdir(filename):
        print("loading in all files in folder:", filename)
        processing_files = utils.get_all_files(filename)
    elif os.path.isfile(filename):
        print("loading in this file:", filename)
        processing_files = utils.get_one_file(filename)
    else:
        print("neither file, nor folder. ending programme.")
        return

    print("processing", len(processing_files))
    print("")

    for file in processing_files:
        print("processing", file)
        # NOTE(review): the return value of load_dump1090_file is discarded
        # here — presumably this variant is only a progress/skeleton driver;
        # confirm against the fuller pipeline variants in this file.
        load_dump1090_file(file)
        print("convert raw adsb files")
        print("decode converted raw adsb files")
        print("decode again planes by icao address. #1")
        print("decode again planes by icao address. #n")
        print("")
def main(path):
    """Discover MLAT input files under *path* and print each file's meta block.

    *path* may be a single file or a directory. Gzipped JSON (``.gz``)
    inputs go through the project's gzip loader; anything else is read
    as plain JSON.
    """
    if os.path.isdir(path):
        print("loading in all files in folder:", path)
        candidates = utils.get_all_files(path)
    elif os.path.isfile(path):
        print("loading in this file:", path)
        candidates = utils.get_one_file(path)
    else:
        print("neither file, nor folder. ending programme.")
        return

    if not candidates:
        exit("No input files found in the directory. Quitting")

    print("processing mlat")
    print("")

    for current in candidates:
        print("processing", current)
        # choose the loader by file extension
        if Path(current).suffix == ".gz":
            payload = load_file_jsonGzip(current)
        else:
            with open(current, 'r') as handle:
                payload = json.load(handle)
        print(payload["meta"])
def main(filename, output, latitude, longitude, altitude, timestart, timeend,
         gs_id, compress=True):
    '''
    Decode RAW ADS-B recordings, attach ground-station metadata, store the
    results, then hand the output directory to the multilateration step.

    The expected inputs to the filename parameter: Path to a file, path to
    a folder.

    :param filename: The path to the folder/file which contains the RAW ADS-B.
    :param output: Output base path; ``data/adsb`` is appended to it.
    :param latitude: Latitude coordinate of the ground station
    :param longitude: Longitude coordinate of the ground station
    :param altitude: Altitude of the ground station
    :param timestart: Recording start timestamp, stored in the meta block
    :param timeend: Recording end timestamp, stored in the meta block
    :param gs_id: Ground-station identifier, stored in the meta block
    :param compress: Store results gzip-compressed (default; matches the
        previously hard-coded behaviour) or as plain JSON.
    '''
    # BUG FIX: os.path.join replaces the original manual separator logic
    # (``output.find(os.sep, 0) != len(output) - 1``), which misfired for
    # any output path containing a separator before its last character
    # and produced doubled separators for trailing-slash paths.
    if output is not None:
        path = os.path.join(output, "data", "adsb")
    else:
        path = os.path.join("data", "adsb")

    if os.path.isdir(filename):
        print("loading in all files in folder:", filename)
        processing_files = utils.get_all_files(filename)
    elif os.path.isfile(filename):
        print("loading in this file:", filename)
        processing_files = utils.get_one_file(filename)
    else:
        print("neither file, nor folder. ending programme.")
        return

    if len(processing_files) == 0:
        exit("No input files found in the directory. Quitting")

    print("processing", len(processing_files))
    print("")

    for file in processing_files:
        print("processing", file)
        data = load_dump1090_file(file)

        # storing meta data
        data["meta"]["gs_rec_timestamp_start"] = timestart
        data["meta"]["gs_rec_timestamp_end"] = timeend
        data["meta"]["gs_id"] = gs_id
        if data["meta"]["gs_lat"] is None and data["meta"]["gs_lon"] is None and \
                latitude is not None and longitude is not None:
            # if the gs location is already set, we don't need the inputs.
            # if they are set, we take them from the loaded data structure.
            data["meta"]["gs_lat"] = float(latitude)
            data["meta"]["gs_lon"] = float(longitude)
            data["meta"]["gs_alt"] = float(altitude)
            print("input lat & long:", data["meta"]["gs_lat"], data["meta"]["gs_lon"])

        data = decode(data)

        print("storing adsb-data")
        # BUG FIX: the original branched on ``"gzip" == "gzip"`` — always
        # true — which made utils.store_file() unreachable dead code. The
        # ``compress`` keyword (default True) keeps the old behaviour while
        # making the uncompressed path actually selectable.
        if compress:
            # standard output
            utils.store_file_jsonGzip(path, file, data)
        else:
            utils.store_file(path, file, data)

    print("doing mlat stuff from here on...")
    multilateration.main(path)
def main(path):
    """For each MLAT input file, report which other input files have an
    overlapping recording time span.

    *path* may be a single decoded-JSON file or a directory of them.
    A file's span is its ``gs_rec_timestamp_start``..``gs_rec_timestamp_end``;
    when the end timestamp is missing it is derived from the last sample
    position and the sampling rate.
    """
    if os.path.isdir(path):
        print("loading in all files in folder:", path)
        processing_files = utils.get_all_files(path)
    elif os.path.isfile(path):
        print("loading in this file:", path)
        processing_files = utils.get_one_file(path)
    else:
        print("neither file, nor folder. ending programme.")
        return

    if len(processing_files) == 0:
        exit("No input files found in the directory. Quitting")

    print("processing mlat")
    print("")

    # PERF FIX: compute each file's (start, end) span exactly once. The
    # original re-loaded and re-parsed every JSON file inside the inner
    # loop — O(n^2) file loads for n input files.
    spans = {f: _time_span(load_file_json(f)) for f in processing_files}

    # sorting the files for overlapping time spans
    for file_base in processing_files:
        print("processing", file_base)
        start_base, end_base = spans[file_base]

        # in this, we will store the overlapping files we will later process further
        load_files = []
        for file_compare in processing_files:
            start_compare, end_compare = spans[file_compare]

            # BUG FIX: the original wrote ``a <= b <= c is True`` and
            # ``a <= b <= c is False``. Python chains these as
            # ``a <= b and b <= c and (c is True/False)``, and a float is
            # never the ``True``/``False`` singleton, so three of the four
            # classification branches could never trigger as intended.
            # NOTE(review): the conditions test whether *base* lies inside
            # *compare*'s span, while the messages describe the reverse
            # ("file ... inside basefile") — confirm which wording was meant.
            starts_inside = start_compare <= start_base <= end_compare
            ends_inside = start_compare <= end_base <= end_compare

            if starts_inside and ends_inside:
                print("file is fully inside basefile")
            if starts_inside and not ends_inside:
                print("file is starting in basefile and ends later")
            if not starts_inside and ends_inside:
                print("file is ending in basefile and starts earlier")
            if not starts_inside and not ends_inside:
                print("file is not overlapping with basefile")

            if starts_inside or ends_inside:
                # storing all overlapping files
                load_files.append(file_compare)

        print("these", len(load_files), "files overlap:", load_files)


def _time_span(data):
    """Return the (start, end) timestamps of one decoded recording.

    Falls back to start + last-SamplePos / sampling-rate when the explicit
    end timestamp is absent, mirroring the original inline computation.
    """
    start = float(data["meta"]["gs_rec_timestamp_start"])
    if data["meta"]["gs_rec_timestamp_end"] is not None:
        end = float(data["meta"]["gs_rec_timestamp_end"])
    else:
        end = start + \
            float(data["data"][-1]["SamplePos"]) / \
            float(data["meta"]["gs_rec_samplingrate"])
    return start, end