def to_csv(self, target_path, target_proj4=None, **kw):
    """Write this geotable to a CSV file (optionally inside an archive).

    Geometries are converted per-projection via _get_instance_for_csv and
    concatenated. Columns that are constant across the table
    (geometry_layer, geometry_proj4) are omitted from the CSV; a non-default
    projection is recorded in a sibling .proj4 file instead.

    :param target_path: destination path; an archive extension (e.g. .zip)
        triggers compression of the staging folder
    :param target_proj4: optional proj4 string to reproject geometries into
    :param kw: extra keyword arguments forwarded to pandas' to_csv
    """
    try:
        t = concatenate_tables(
            _get_instance_for_csv(
                x, source_proj4 or LONGITUDE_LATITUDE_PROJ4, target_proj4,
            ) for source_proj4, x in self.groupby('geometry_proj4'))
    except ValueError:
        # concatenate_tables raises ValueError when there is nothing to
        # concatenate (empty groupby); fall back to the table as-is
        t = self
    excluded_column_names = []
    unique_geometry_layers = t['geometry_layer'].unique()
    unique_geometry_proj4s = t['geometry_proj4'].unique()
    if len(unique_geometry_layers) == 1:
        excluded_column_names.append('geometry_layer')
    if len(unique_geometry_proj4s) == 1:
        excluded_column_names.append('geometry_proj4')
        geometry_proj4 = unique_geometry_proj4s[0]
        if geometry_proj4 != LONGITUDE_LATITUDE_PROJ4:
            # Record the shared projection in a sibling .proj4 file.
            # Use a context manager so the handle is closed (the original
            # leaked an open file object here).
            with open(replace_file_extension(
                    target_path, '.proj4'), 'wt') as proj4_file:
                proj4_file.write(geometry_proj4)
    if len(t) == 0:
        # An empty table has no geometry values at all; drop the geometry
        # columns entirely and emit an empty wkt column as a placeholder
        excluded_column_names.extend(
            ['geometry_layer', 'geometry_proj4', 'geometry_object'])
        t['wkt'] = ''
    t = t.drop(columns=excluded_column_names)
    with TemporaryStorage() as storage:
        temporary_path = join(storage.folder, 'geotable.csv')
        # Call the plain pandas to_csv, bypassing this override
        super(GeoTable, t).to_csv(temporary_path, **kw)
        if has_archive_extension(target_path):
            compress(storage.folder, target_path)
        else:
            # NOTE(review): argument order (target, source) follows the
            # project helper's signature — verify move_path's convention
            move_path(target_path, temporary_path)
def to_gdal(self, target_path, target_proj4=None, driver_name='ESRI Shapefile'):
    """Write this geotable through a GDAL vector driver.

    One GDAL layer is created per distinct geometry_layer value. Archive
    targets (.zip etc.) and .kmz targets are staged in a temporary folder
    and compressed afterwards; any other target is written in place.

    :param target_path: destination path; extension selects archive/kmz
        post-processing
    :param target_proj4: optional proj4 string passed to _prepare_gdal_layer
    :param driver_name: GDAL driver to use (default ESRI Shapefile)
    :raises GeoTableError: if the requested GDAL driver is unavailable
    """
    gdal_driver = gdal.GetDriverByName(driver_name)
    if not gdal_driver:
        raise GeoTableError('gdal driver missing (%s)' % driver_name)
    try:
        geometry_columns = _get_geometry_columns(self)
    except GeoTableError:
        # No recognizable geometry columns; write the table unchanged
        table = self
    else:
        # Geometry columns are serialized by the driver, not as attributes
        table = self.drop(columns=geometry_columns)
    as_archive = has_archive_extension(target_path)
    as_kmz = target_path.endswith('.kmz')
    with TemporaryStorage() as storage:
        if as_archive:
            gdal_dataset_path = storage.folder
        elif as_kmz:
            # KMZ is a zipped KML; stage the KML under the target's stem
            gdal_dataset_path = join(
                storage.folder, get_file_stem(target_path) + '.kml')
        else:
            gdal_dataset_path = target_path
        gdal_dataset = gdal_driver.Create(gdal_dataset_path, 0, 0)
        for layer_name, layer_t in table.groupby('geometry_layer'):
            _prepare_gdal_layer(layer_t, gdal_dataset, target_proj4, layer_name)
        # Flush pending writes before compressing the staging folder.
        # NOTE(review): some drivers only finalize output when the dataset
        # object is destroyed — confirm FlushCache suffices for all drivers
        gdal_dataset.FlushCache()
        if as_archive:
            compress(storage.folder, target_path)
        elif as_kmz:
            compress_zip(storage.folder, target_path)
def test_resolve_source_folder_link(self, sandbox, target_folder):
    """Compressing via a symlinked source folder round-trips correctly."""
    linked_folder = sandbox.source_folder_link_path
    archive_path = linked_folder + self.extension
    compress(linked_folder, archive_path, [sandbox.external_folder])
    extracted_folder = uncompress(archive_path, target_folder)
    assert_archive_contents(extracted_folder, sandbox)
def run():
    """Handle a search-form submission: run the script, archive the results.

    Reads search terms from the POSTed form, runs the analysis into a fresh
    temporary folder, compresses that folder into the results folder under a
    timestamped random nickname, copies the result image alongside it, and
    renders the response page.
    """
    target_folder = mkdtemp()
    # De-duplicate and sort the newline-separated term lists
    journal_names = sorted(set(
        request.form.get('journal_names', '').splitlines()))
    text_terms = sorted(set(
        request.form.get('text_terms', '').splitlines()))
    mesh_terms = sorted(set(
        request.form.get('mesh_terms', '').splitlines()))
    custom_expression = request.form.get('custom_expression', '')
    try:
        from_date = parse_date(request.form.get('from_date'))
        to_date = parse_date(request.form.get('to_date'))
    except (TypeError, ValueError):
        # Missing or malformed dates mean "no date filter"
        from_date, to_date = None, None
    # NOTE(review): this raises TypeError/ValueError (a 500) when the field
    # is missing or non-numeric — consider a None fallback as in the
    # author-aware variant of this view, if run_script accepts it
    date_interval_in_years = int(
        request.form.get('date_interval_in_years'))
    result_properties = run_script(
        target_folder, journal_names, text_terms, mesh_terms,
        custom_expression, from_date, to_date, date_interval_in_years)
    # Fixed: the original used '%Y%m%d-%M%H', which put minutes before hours
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
    archive_nickname = '%s-%s' % (
        timestamp, security.make_random_string(16))
    archive_path = join(results_folder, archive_nickname + '.zip')
    disk.compress(target_folder, archive_path)
    source_image_path = join(target_folder, result_properties['image_name'])
    target_image_path = join(results_folder, archive_nickname + '.png')
    shutil.copy(source_image_path, target_image_path)
    return render_template(
        'response.html',
        archive_name=basename(archive_path),
        image_name=basename(target_image_path),
        result_properties=result_properties)
def test_include_external_link(self, sandbox, target_folder):
    """Externally-linked content requires whitelisting the external folder."""
    source_folder = sandbox.source_folder
    archive_path = source_folder + self.extension
    # Without whitelisting the external folder, compression must fail
    with raises(IOError):
        compress(source_folder, archive_path)
    unpacked_folder = uncompress(archive_path, target_folder)
    assert_archive_contents(unpacked_folder, sandbox)
    # Whitelisting the external folder lets compression succeed
    archive_path = compress(
        source_folder, archive_path, [sandbox.external_folder])
    unpacked_folder = uncompress(archive_path, unpacked_folder)
    assert_archive_contents(unpacked_folder, sandbox)
def test_include_external_link(self, sandbox, target_folder):
    """Compression of externally-linked content needs an explicit whitelist."""
    folder = sandbox.source_folder
    path = folder + self.extension
    # First attempt fails: the external folder was not whitelisted
    with raises(IOError):
        compress(folder, path)
    target_folder = uncompress(path, target_folder)
    assert_archive_contents(target_folder, sandbox)
    # Second attempt succeeds once the external folder is allowed
    path = compress(folder, path, [sandbox.external_folder])
    target_folder = uncompress(path, target_folder)
    assert_archive_contents(target_folder, sandbox)
def run():
    """Handle a search-form submission, including author-only searches.

    Runs the analysis into a temporary folder, archives it into the results
    folder under a timestamped random nickname, and renders either the image
    response page or, for author searches (no image produced), the
    author-count response page.
    """
    target_folder = mkdtemp()
    journal_names = load_terms(request.form.get('journal_names', ''))
    author_names = load_terms(request.form.get('author_names', ''))
    text_terms = load_terms(request.form.get('text_terms', ''))
    mesh_terms = load_terms(request.form.get('mesh_terms', ''))
    custom_expression = request.form.get('custom_expression', '')
    try:
        from_date = parse_date(request.form.get('from_date'))
        to_date = parse_date(request.form.get('to_date'))
    except (TypeError, ValueError):
        # Missing or malformed dates mean "no date filter"
        from_date, to_date = None, None
    try:
        date_interval_in_years = int(
            request.form.get('date_interval_in_years'))
    except (TypeError, ValueError):
        # Fixed: a non-numeric value previously raised an uncaught
        # ValueError (a 500); treat it like a missing field instead
        date_interval_in_years = None
    result_properties = run_script(
        target_folder, journal_names, text_terms, mesh_terms,
        custom_expression, author_names, from_date, to_date,
        date_interval_in_years)
    # Fixed: the original used '%Y%m%d-%M%H', which put minutes before hours
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
    archive_nickname = '%s-%s' % (timestamp, make_random_string(16))
    archive_path = join(results_folder, archive_nickname + '.zip')
    compress(target_folder, archive_path)
    if 'image_name' in result_properties:
        source_image_path = join(
            target_folder, result_properties['image_name'])
        target_image_path = join(results_folder, archive_nickname + '.png')
        shutil.copy(source_image_path, target_image_path)
        return render_template(
            'response.html',
            archive_name=basename(archive_path),
            image_name=basename(target_image_path),
            result_properties=result_properties)
    else:
        # Author searches produce per-author counts instead of an image
        return render_template(
            'author_response.html',
            archive_name=basename(archive_path),
            name_count_packs=zip(
                result_properties['author_names'],
                result_properties['article_counts']))
def post(self):
    """Publish a notebook as a tool on the configured CrossCompute server.

    Requires a server_token environment variable (401 if missing). The
    notebook at the posted notebook_path is converted to a tool definition
    (400 on CrossComputeError), its configuration folder is compressed and
    uploaded to <server_url>/tools.json. Responds with the resulting
    tool_url, or mirrors the server's error status.
    """
    try:
        server_token = expect_variable('server_token')
    except KeyError:
        self.set_status(401)
        return self.write({})
    server_url = expect_variable('server_url', 'https://crosscompute.com')
    notebook_id = expect_variable('notebook_id', '')
    notebook_path = self.get_argument('notebook_path')
    try:
        tool_definition = IPythonNotebookTool.prepare_tool_definition(
            notebook_path, with_debugging=False)
    except CrossComputeError as e:
        self.set_status(400)
        return self.write({'text': str(e)})
    archive_path = compress(tool_definition['configuration_folder'])
    # Use a context manager so the archive handle is closed after upload
    # (the original leaked the open file object passed to requests)
    with open(archive_path, 'rb') as archive_file:
        response = requests.post(
            server_url + '/tools.json',
            headers={
                'Authorization': 'Bearer ' + server_token,
            }, data={
                'notebook_id': notebook_id,
                'notebook_path': notebook_path,
                'environment_level': expect_variable('environment_level', 0),
                'processor_level': expect_variable('processor_level', 0),
                'memory_level': expect_variable('memory_level', 0),
            } if notebook_id else {}, files={
                'tool_folder': archive_file,
            })
    status_code = response.status_code
    if status_code != 200:
        # Propagate the upstream failure code unchanged
        self.set_status(status_code)
        return self.write({})
    try:
        d = response.json()
        tool_url = d['tool_url']
    except Exception:
        # Malformed or non-JSON success response from the server
        self.set_status(503)
        return self.write({})
    self.write({'tool_url': server_url + tool_url})
def test_resolve_source_folder_link(self, sandbox, target_folder):
    """A symlinked source folder compresses and restores its contents."""
    folder_via_link = sandbox.source_folder_link_path
    archive = folder_via_link + self.extension
    compress(folder_via_link, archive, [sandbox.external_folder])
    target_folder = uncompress(archive, target_folder)
    assert_archive_contents(target_folder, sandbox)
def test_resolve_source_folder_link(self, sandbox, tmpdir):
    """Compressing through a folder symlink preserves the folder contents."""
    folder_link = sandbox.source_folder_link_path
    archive_path = compress(folder_link, folder_link + self.extension)
    restored_folder = uncompress(archive_path, str(tmpdir))
    assert_contents(restored_folder, sandbox)
def test_include_external_link(self, sandbox, tmpdir):
    """A folder with an external link still round-trips through compress."""
    folder = sandbox.source_folder
    archive_path = compress(folder, folder + self.extension)
    restored_folder = uncompress(archive_path, str(tmpdir))
    assert_contents(restored_folder, sandbox)