def test_creates_directories_when_required_on_ensure_check(tmpdir):
    """ensure_dir creates a directory that does not yet exist."""
    target = f'{tmpdir}/new_dir'
    assert not os.path.isdir(target)

    persistence.ensure_dir(target)

    assert os.path.isdir(target)
def test_swallows_exceptions_making_new_directories(mocker):
    """ensure_dir must swallow errors raised by os.makedirs, not propagate them."""
    boom = PermissionError('BOOM!')
    mocker.patch.object(os.path, 'exists', return_value=False)
    mocker.patch.object(os, 'makedirs', side_effect=boom)

    # must not raise despite makedirs blowing up
    persistence.ensure_dir('/some/path/or/other')

    os.makedirs.assert_called_once()
def write_to_file(self, output_dir):
    """Refresh this auxiliary file's contents and persist them under `output_dir`.

    Dispatches on the file extension of `self.filename`.

    :param output_dir: directory to write to; created if missing
    :return: result of the format-specific writer
    :raises NotImplementedError: for unsupported file formats
    """
    self.update()
    persistence.ensure_dir(output_dir)
    logging.info(f'Saving auxiliary file {self.filename} in {output_dir}')
    # guard-clause dispatch on supported formats
    if persistence.is_csv(self.filename):
        return self._write_csv(output_dir)
    if persistence.is_json(self.filename):
        return self._write_json(output_dir)
    raise NotImplementedError(f'File {self.filename} is not currently supported as an auxiliary file.')
def test_does_not_overwrite_existing_dirs_on_ensure_check(tmpdir):
    """ensure_dir leaves an already-existing directory untouched."""
    existing = f'{tmpdir}/new_dir'
    os.makedirs(existing)
    assert os.path.isdir(existing)
    mtime_before = os.path.getmtime(existing)

    persistence.ensure_dir(existing)

    assert os.path.isdir(existing)
    # unchanged mtime shows the directory was not recreated
    assert os.path.getmtime(existing) == mtime_before
def dump_all_api_requests_to_json(api_requests, output_dir):
    """Write `api_requests` to `<output_dir>/api_requests.json`.

    Keys and values are stringified first because the request dict is keyed
    by tuples, which JSON object keys cannot represent.

    :param api_requests: dict of Google Directions API requests (tuple keys)
    :param output_dir: directory to save `api_requests.json` to; created if missing
    """
    # sanitise tuple keys: JSON object keys must be strings.
    # f'{k}' is equivalent to the original '{}'.format(k).
    sanitised = {f'{k}': f'{v}' for k, v in api_requests.items()}
    persistence.ensure_dir(output_dir)
    logging.info(f'Saving Google Directions API requests to {output_dir}')
    with open(os.path.join(output_dir, 'api_requests.json'), 'w') as fp:
        json.dump(sanitised, fp)
def save_geodataframe(gdf, filename, output_dir, include_shp_files=False):
    """Save a GeoDataFrame as GeoJSON (and optionally shapefiles) in `output_dir`.

    Does nothing when `gdf` is empty.

    :param gdf: geopandas.GeoDataFrame to save
    :param filename: base name (without extension) for the output files
    :param output_dir: target directory; created if missing
    :param include_shp_files: if True, also write ESRI shapefiles under
        `<output_dir>/shp_files`
    """
    if not gdf.empty:
        gdf = sanitiser.sanitise_geodataframe(gdf)
        persistence.ensure_dir(output_dir)
        # BUG FIX: `filename` was previously ignored — a literal placeholder
        # was baked into the f-string, so every call wrote the same file name.
        gdf.to_file(os.path.join(output_dir, f'{filename}.geojson'), driver='GeoJSON')
        # shapefile drivers cannot serialise datetime columns directly
        for col in [col for col in gdf.columns if is_datetime(gdf[col])]:
            gdf[col] = gdf[col].astype(str)
        if include_shp_files:
            shp_files = os.path.join(output_dir, 'shp_files')
            persistence.ensure_dir(shp_files)
            gdf.to_file(os.path.join(shp_files, f'{filename}.shp'))
def test_zipping_folder(tmpdir):
    """zip_folder produces `<folder>.zip` next to the folder."""
    folder = os.path.join(tmpdir, 'folder_to_zip_up')
    persistence.ensure_dir(folder)
    # FIX: the original called f.close() inside the with-block; the context
    # manager already closes the file, so the explicit close was redundant.
    with open(os.path.join(folder, 'helloworld.txt'), 'wb') as f:
        f.write(b'hello world')
    assert os.path.exists(os.path.join(folder, 'helloworld.txt'))
    assert not os.path.exists(os.path.join(tmpdir, 'folder_to_zip_up.zip'))

    persistence.zip_folder(folder)

    assert os.path.exists(os.path.join(tmpdir, 'folder_to_zip_up.zip'))
'--null_value', help='Value that represents null in the elevation tif file', required=True) arg_parser.add_argument('-od', '--output_dir', help='Output directory for the updated network', required=True) args = vars(arg_parser.parse_args()) network = args['network'] projection = args['projection'] elevation = args['elevation'] tif_null_value = args['null_value'] output_dir = args['output_dir'] ensure_dir(output_dir) logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.WARNING) logging.info('Reading in network at {}'.format(network)) n = read_matsim( path_to_network=network, epsg=projection ) logging.info('Adding elevation to network nodes') n.add_elevation_to_nodes(elevation_tif_file_path=elevation, null_value=tif_null_value) logging.info('Validating the elevation data added to network nodes') report = n.validation_report_for_node_elevation()
def write_to_matsim(self, output_dir):
    """Write this schedule to MATSim XML in `output_dir`, plus its vehicles file.

    :param output_dir: directory to write to; created if missing
    """
    persistence.ensure_dir(output_dir)
    schedule_vehicles = matsim_xml_writer.write_matsim_schedule(output_dir, self)
    matsim_xml_writer.write_vehicles(output_dir, schedule_vehicles)