def main():
    """Generate a scenario from random flight arrangements.

    Flights appear every `interval` seconds, where `interval` is a random
    variable drawn with a predefined mean and deviation. The resulting
    scenario (departures only) is exported as JSON into OUTPUT_FOLDER.
    """
    # Creates the output folder
    create_output_folder(OUTPUT_FOLDER)

    current_time = 0

    # In this scenario, we only have departure flights to simplify the problem
    departures = []
    while current_time < END_TIME:
        flight = generate_flight_at(current_time)
        departures.append(flight)
        interval = get_random_time_interval()
        current_time += interval

    scenario = {"arrivals": [], "departures": departures}

    # Saves to file
    output_filename = OUTPUT_FOLDER + "scenario.json"
    export_to_json(output_filename, scenario)

    logger.debug("Done")
def process(self):
    """Run the full transformation pipeline and persist each result to CSV.

    Logs an error and does nothing when the input dataset is missing.
    """
    # Guard clause: bail out early when the raw dataset is absent.
    if not check_if_data():
        log.error("Place your dataset inside data folder")
        return

    log.info("Create output folder if not exists")
    create_output_folder()

    log.info("Process Movies")
    self.store_df(self.handle_movies_df(), 'output/movies.csv')
    log.info("Processed Movies stored")

    log.info("Process Movie Genres")
    self.store_df(self.transform_genres(), 'output/genres.csv')

    log.info("Process Movies Cast")
    self.store_df(self.handle_actors_df(), 'output/cast.csv')
    log.info('Processed cast stored')

    log.info('Process Movie Crew')
    self.store_df(self.handle_crew_df(), 'output/crew.csv')
    log.info('Processed Crew stored')

    log.info('Process Movie keywords')
    self.store_df(self.handle_keywords(), 'output/keywords.csv')
    log.info("Processed Movie keywords stored")
def main():
    """Generate airport surface data (metadata, nodes, and links) from a KML
    document and export each dataset as JSON.

    Note: the flight scenario itself is produced separately by
    generate_scenario.py.
    """
    # Creates the output folder
    create_output_folder(OUTPUT_FOLDER)

    # Gets KML document from file
    kml_doc = get_kml_document()

    # Generates airport data
    logger.debug("Generating airport metadata")
    generate_airport_data(kml_doc)
    logger.debug("Airport metadata generated")

    # Generates gate data
    logger.debug("Generating gate data")
    generate_node_data(kml_doc, LayerType.gate, "gates.json")
    logger.debug("Gate data generated")

    # Generates spot position
    logger.debug("Generating spot position data")
    generate_node_data(kml_doc, LayerType.spot, "spots.json")
    logger.debug("Spot position data generated")

    # Generates inter position
    logger.debug("Generating inter position data")
    generate_node_data(kml_doc, LayerType.inter, "inters.json")
    logger.debug("Inter position data generated")

    # Generates inter position for sijia
    logger.debug("Generating debug position data")
    generate_node_data(kml_doc, LayerType.debug, "debug.json")
    logger.debug("Debug position data generated")

    # Generates runway data (fixed "Genenrating" typo in the log message)
    logger.debug("Generating runway data")
    generate_link_data(kml_doc, LayerType.runway, "runways.json")
    logger.debug("Runway data generated")

    # Generates taxiway data
    logger.debug("Generating taxiway data")
    generate_link_data(kml_doc, LayerType.taxiway, "taxiways.json")
    logger.debug("Taxiway data generated")

    # Generates pushback way data
    logger.debug("Generating pushback way data")
    generate_link_data(kml_doc, LayerType.pushback_way, "pushback_ways.json")
    logger.debug("Pushback way data generated")

    # Warning: Generates scenario
    logger.debug("Generate scenario using generate_scenario.py")
def main():
    """Build a scenario from CSV flight data and export it, together with the
    gate-to-spot mapping, as JSON files under OUTPUT_FOLDER.
    """
    departures = get_departure_from_csv()
    arrivals = get_arrival_from_csv()
    # Replaced leftover debug print() calls (which dumped the whole scenario
    # to stdout) with the module logger, using lazy %-style arguments.
    logger.debug("Loaded %d departures and %d arrivals",
                 len(departures), len(arrivals))

    scenario = {"arrivals": arrivals, "departures": departures}

    create_output_folder(OUTPUT_FOLDER)
    output_filename = OUTPUT_FOLDER + "scenario.json"
    export_to_json(output_filename, scenario)

    logger.debug("Generating gate spots data")
    # NOTE(review): spots_to_gates is a module-level name defined outside this
    # view — presumably a gate-to-spot mapping; verify against the module.
    gate_spots_filename = OUTPUT_FOLDER + "gates_spots.json"
    export_to_json(gate_spots_filename, spots_to_gates)

    logger.debug("Done")
def main():
    """Undistort every .bmp image found under the working directory.

    Each corrected image is written to the output folder with an
    "_undist" suffix. Returns True when all images are processed.
    """
    cfg = load_config()
    src_dir, dst_dir = create_output_folder()

    # tqdm renders a progress bar over the recursive .bmp listing.
    for bmp_path in tqdm(list(src_dir.glob('**/*.bmp'))):
        original = read_image(bmp_path, cfg)
        corrected = DistortionRemover(original, cfg).image_undist()
        target = str(dst_dir / (bmp_path.stem + "_undist.bmp"))
        write_image(corrected, target)

    return True
def main():
    """Undistort every .bmp image found under the working directory,
    printing textual progress after each file.

    Each corrected image is written to the output folder with an
    "_undist" suffix. Returns True when all images are processed.
    """
    config = load_config()
    working_dir, output_path = create_output_folder()

    img_file_list = list(working_dir.glob('**/*.bmp'))
    img_num = len(img_file_list)  # For progress reporting

    # enumerate(..., 1) yields 1-based counts directly — no manual i+1 math.
    for count, img_file in enumerate(img_file_list, 1):
        src = read_image(img_file, config)
        dst = DistortionRemover(src, config).image_undist()
        out_file = str(output_path / (img_file.stem + "_undist.bmp"))
        write_image(dst, out_file)
        # f-string emits the identical "Processed:<i>/<n>" message as the
        # original str() concatenation.
        print(f"Processed:{count}/{img_num}")

    return True
w2ply = True # Try to run on GPU ti.init(arch=ti.cuda, kernel_profiler=True, use_unified_memory=False, device_memory_fraction=0.7) max_num_particles = 4000000 if with_gui: gui = ti.GUI("MLS-MPM", res=512, background_color=0x112F41) if write_to_disk or w2ply: output_dir = create_output_folder('./sim') def load_mesh(fn, scale, offset): print(f'loading {fn}') plydata = PlyData.read(fn) x = plydata['vertex']['x'] y = plydata['vertex']['y'] z = plydata['vertex']['z'] elements = plydata['face'] num_tris = len(elements['vertex_indices']) triangles = np.zeros((num_tris, 9), dtype=np.float32) for i, face in enumerate(elements['vertex_indices']): assert len(face) == 3 for d in range(3):