def calculate(request):
    """Calculate the buildings affected by flood and render the result.

    Runs the InaSAFE flood/building impact function on the 'Buildings'
    and 'Flood' layers, converts the resulting impact layer to GeoJSON
    under MEDIA_ROOT and renders it with 'layers/calculate.html'.

    :param request: Django HTTP request.
    :returns: Rendered HttpResponse whose context carries the impact
        keywords plus 'geojson' (URL of the impact file) and 'user'.
    """
    output = os.path.join(settings.MEDIA_ROOT, 'layers', 'impact.json')
    # The GDAL GeoJSON driver refuses to overwrite an existing dataset,
    # so a second request would fail unless the previous output is
    # removed first.
    if os.path.exists(output):
        os.remove(output)

    buildings = get_layer_data('Buildings')
    flood = get_layer_data('Flood')

    # Assign the keywords required by the InaSAFE calculation.
    buildings.keywords['category'] = 'exposure'
    buildings.keywords['subcategory'] = 'structure'
    flood.keywords['category'] = 'hazard'
    flood.keywords['subcategory'] = 'flood'

    impact_function = FloodBuildingImpactFunction
    # Run the analysis.
    impact_file = calculate_impact(layers=[buildings, flood],
                                   impact_fcn=impact_function)

    # Convert the impact layer to GeoJSON for the web client.
    call(['ogr2ogr', '-f', 'GeoJSON', output, impact_file.filename])

    impact_geojson = os.path.join(settings.MEDIA_URL, 'layers',
                                  'impact.json')
    context = impact_file.keywords
    context['geojson'] = impact_geojson
    context['user'] = request.user
    return render(request, 'layers/calculate.html', context)
def calculate(request):
    """Run the flood-on-buildings impact analysis and show the result.

    :param request: Django HTTP request.
    """
    output = os.path.join(settings.MEDIA_ROOT, 'layers', 'impact.json')

    buildings = get_layer_data('Buildings')
    flood = get_layer_data('Flood')

    # InaSAFE will not run without these keywords on both layers.
    buildings.keywords['category'] = 'exposure'
    buildings.keywords['subcategory'] = 'structure'
    flood.keywords['category'] = 'hazard'
    flood.keywords['subcategory'] = 'flood'

    # Perform the analysis with the flood/building impact function.
    impact_function = FloodBuildingImpactFunction
    impact_file = calculate_impact(layers=[buildings, flood],
                                   impact_fcn=impact_function)

    # Dump the impact layer to GeoJSON so the template can load it.
    call(['ogr2ogr', '-f', 'GeoJSON', output, impact_file.filename])

    context = impact_file.keywords
    context['geojson'] = os.path.join(settings.MEDIA_URL, 'layers',
                                      'impact.json')
    context['user'] = request.user
    return render(request, 'layers/calculate.html', context)
def calculate(hazard_filename, exposure_filename):
    """Use SAFE to calculate the impact.

    Inputs:
        hazard_filename: Absolute path to hazard file
        exposure_filename: Absolute path to exposure file
    """
    hazard_layer = read_layer(hazard_filename)
    exposure_layer = read_layer(exposure_filename)
    impact_function = ModisFloodImpactFunction

    impact_layer = calculate_impact(layers=[hazard_layer, exposure_layer],
                                    impact_fcn=impact_function)
    # Re-read the written impact file so the caller gets a raster layer.
    return read_layer(impact_layer.get_filename())
def calculate(hazard_filename, exposure_filename):
    """Use SAFE to calculate the impact.

    Inputs:
        hazard_filename: Absolute path to hazard file
        exposure_filename: Absolute path to exposure file
    """
    # Load both inputs, hazard first, then run the MODIS flood function.
    layers = [read_layer(hazard_filename), read_layer(exposure_filename)]
    impact_layer = calculate_impact(layers=layers,
                                    impact_fcn=ModisFloodImpactFunction)

    impact_filename = impact_layer.get_filename()
    calculated_raster = read_layer(impact_filename)
    return calculated_raster
def calculate(request, save_output=save_file_to_geonode):
    """Django view: run an impact calculation requested via HTTP POST.

    Expects POST fields impact_function, hazard_server, hazard,
    exposure_server, exposure, bbox and keywords.  Downloads the two
    layers, runs the selected impact function and uploads the result
    via ``save_output``.  A Calculation row records the run; on failure
    a JSON body with 'errors' and 'stacktrace' is returned.

    NOTE(review): no success-path HttpResponse is visible in this block;
    presumably the view continues further down in the original file --
    confirm against the full source.
    """
    start = datetime.datetime.now()
    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']  # NOTE(review): unused in this block

        # Attribute anonymous requests to a generated valid user so the
        # Calculation row always has an owner.
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database (success=False until the run completes)
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(haz_metadata,
                                                              exp_metadata,
                                                              requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()
            msg = ('Could not find "%s" in "%s"' % (
                impact_function_name, plugins.keys()))
            assert impact_function_name in plugins, msg
            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)
                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)
            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)

            # Determine layer title for upload
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                output_kw['hazard_title'] + \
                " and " + output_kw['exposure_title']
            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser,
                                 overwrite=False)
        except Exception, e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def post(self):
    """Handle PDF report generation and impact calculation requests.

    The 'purpose' argument selects the action:
    * contains 'pdf' - render the posted HTML to DATA_PATH/pdf/report.pdf
    * contains 'calculate' - run the flood/building impact function on
      the posted hazard and exposure layers and render the HTML summary
      with the 'result.html' template.
    """
    result = None
    purpose = self.get_argument("purpose")
    if "pdf" in purpose:
        html = self.get_argument("html")
        output = os.path.join(DATA_PATH, 'pdf', 'report.pdf')
        # Context manager so the stylesheet handle is closed even if
        # read() raises (the original leaked it in that case).
        with open(os.path.join(ROOT, 'static', 'css', 'pdf.css')) as data:
            css = data.read()
        HTML(string=html).write_pdf(output, stylesheets=[CSS(string=css)])
        return
    elif "calculate" in purpose:
        encoding = sys.getfilesystemencoding()
        exposure = self.get_argument("exposure")
        exposure_category = self.get_argument("exposure_category")
        exposure_subcategory = self.get_argument("exposure_subcategory")
        hazard = self.get_argument("hazard")
        hazard_category = self.get_argument("hazard_category")
        hazard_subcategory = self.get_argument("hazard_subcategory")
        try:
            hazard_layer = read_layer(hazard.encode(encoding))
            exposure_layer = read_layer(exposure.encode(encoding))

            # Assign the keywords required by the InaSAFE calculation.
            exposure_layer.keywords['category'] = exposure_category
            exposure_layer.keywords['subcategory'] = exposure_subcategory
            hazard_layer.keywords['category'] = hazard_category
            hazard_layer.keywords['subcategory'] = hazard_subcategory

            # TODO: determine the impact function from the keywords
            # instead of hard-coding the flood/building one.
            impact_function = FloodBuildingImpactFunction

            output = os.path.join(DATA_PATH, 'impact', 'impact.json')
            output_style = os.path.join(DATA_PATH, 'impact',
                                        'impact_style.json')
            output_summary = os.path.join(DATA_PATH, 'impact',
                                          'impact_summary.html')

            if os.path.exists(output) and os.path.exists(output_style) \
                    and os.path.exists(output_summary):
                # All artifacts from a previous run exist - reuse the
                # cached summary instead of recalculating.
                with open(output_summary) as summary:
                    result = summary.read()
            else:
                impact = calculate_impact(
                    layers=[exposure_layer, hazard_layer],
                    impact_fcn=impact_function
                )
                # Persist the style for the impact layer (the redundant
                # explicit close() inside the with block was dropped).
                with open(output_style, 'w') as style_json:
                    json.dump(impact.style_info, style_json)
                # Persist the impact summary HTML.
                result = impact.keywords["impact_summary"]
                with open(output_summary, 'w') as summary:
                    summary.write(result)
        except:
            # Re-raised so Tornado reports the full traceback; narrowing
            # this bare except is the eventual fix.
            print('IO Error or something else has occurred!')
            raise
        else:
            self.render("result.html", result=result)
def get(self):
    """Calculate the impact of hazard layer 'h' on exposure layer 'e'.

    Checks redis for a cached result first; on a miss it fetches both
    shapefile paths from postgres, clips the layers to their common
    bounding box with ogr2ogr, runs the first admissible impact
    function and writes the resulting GeoJSON back (caching it when
    redis is available).
    """
    exposure_id = self.get_argument('e')
    hazard_id = self.get_argument('h')
    impact_name = 'impact-e%s-h%s' % (exposure_id, hazard_id)
    # ROBUSTNESS FIX: several failure paths previously left 'writeout'
    # unbound, making the final self.write raise NameError.
    writeout = ''
    if exposure_id and hazard_id:
        # First check if the impact already exists in the cache
        try:
            # try to connect to the redis cache
            redis_server = redis.Redis()
            cache = True
            print('Successfully connected to redis!')
        except:
            # This is just a flag that will be used later on
            print("I couldn't connect to redis")
            cache = False
        else:
            # If the impact exists, get it from the cache and return
            if redis_server.exists(impact_name):
                print('Entry exists in cache!')
                writeout = redis_server.get(impact_name)
                self.set_header('Content-Type', 'application/javascript')
                self.write(writeout)
                return
        # Query the db and calculate if it doesn't
        try:
            # try connecting to the pg database
            conn = psycopg2.connect(
                "dbname='dev' user='******' password='******'")
            print('Successfully connected to postgres!')
        except:
            writeout = 'Could not connect to the database!'
        else:
            # create a cursor
            cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
            try:
                # 1. Query the db for the layers.
                # SECURITY FIX: the ids come straight from the request,
                # so bind them as query parameters instead of
                # interpolating them into the SQL string.
                query = 'SELECT shapefile FROM layers WHERE id = %s'
                cursor.execute(query, (exposure_id,))
                exposure = cursor.fetchone()
                cursor.execute(query, (hazard_id,))
                hazard = cursor.fetchone()
            except:
                writeout = 'There was something wrong with your query'
                conn.rollback()
            else:
                if exposure and hazard:
                    # Pass the shapefile (paths) to read_layer
                    try:
                        exposure_layer = read_layer(exposure['shapefile'])
                        hazard_layer = read_layer(hazard['shapefile'])
                    except:
                        # FIXME(review): execution continues after this
                        # failure and will NameError below; should
                        # return early instead.
                        writeout = 'Something went wrong when reading the layers'

                    # Keywords
                    exposure_dict = exposure_layer.get_keywords()
                    hazard_dict = hazard_layer.get_keywords()
                    if exposure_layer.is_vector:
                        exposure_dict['layertype'] = 'vector'
                    else:
                        exposure_dict['layertype'] = 'raster'
                    if hazard_layer.is_vector:
                        hazard_dict['layertype'] = 'vector'
                    else:
                        # BUG FIX: the original wrote the hazard raster
                        # flag into exposure_dict here.
                        hazard_dict['layertype'] = 'raster'

                    # get optimal bounding box
                    common_bbox = bbox_intersection(
                        exposure_layer.get_bounding_box(),
                        hazard_layer.get_bounding_box())
                    print(exposure_layer.get_bounding_box())
                    print(hazard_layer.get_bounding_box())
                    bbox_string = ''
                    try:
                        # bbox_intersection returns None for disjoint
                        # layers, which makes this loop raise.
                        for val in common_bbox:
                            bbox_string += str(val) + ' '
                    except:
                        writeout = 'The layers have no intersection!'
                    else:
                        # SECURITY/ROBUSTNESS FIX: run ogr2ogr without a
                        # shell; the bbox coordinates become separate
                        # argv entries exactly as the shell would have
                        # split them.
                        clip_args = bbox_string.split()
                        # gdal clip of the hazard layer
                        dest = 'hazard_tmp.shp'
                        src = hazard_layer.filename
                        print(src)
                        try:
                            call(['ogr2ogr', '-clipsrc'] + clip_args +
                                 [dest, src])
                        except:
                            print('could not clip hazard')
                        else:
                            print('created clipped hazard. Reading layer now.')
                            try:
                                clipped_hazard = read_layer("hazard_tmp.shp")
                            except:
                                print('something went wrong when reading the clipped hazard')
                            else:
                                print(clipped_hazard)
                                # gdal clip of the exposure layer
                                dest = 'exposure_tmp.shp'
                                src = exposure_layer.filename
                                print(src)
                                try:
                                    call(['ogr2ogr', '-clipsrc'] +
                                         clip_args + [dest, src])
                                except:
                                    print('could not clip exposure')
                                else:
                                    print('created clipped exposure. Reading layer now.')
                                    try:
                                        clipped_exposure = read_layer(
                                            "exposure_tmp.shp")
                                    except:
                                        print('something went wrong when reading the clipped exposure')
                                    else:
                                        print(clipped_exposure)
                                        # get impact function based on layer keywords
                                        fncs = get_admissible_plugins(
                                            [hazard_dict, exposure_dict])
                                        impact_fnc = fncs.values()[0]
                                        layers = [clipped_hazard,
                                                  clipped_exposure]
                                        # BUG FIX: the original passed the
                                        # undefined name 'impact_function'
                                        # here (NameError); the selected
                                        # plugin is 'impact_fnc'.
                                        impact_file = calculate_impact(
                                            layers, impact_fnc)
                                        tmpfile = 'tmp%s.json' % impact_name
                                        # 5. Serialize the output into json:
                                        # convert the impact file to GeoJSON
                                        call(['ogr2ogr', '-f', 'GeoJSON',
                                              tmpfile, impact_file.filename])
                                        # FIXME: Something needs to be done
                                        # about the encoding
                                        f = open(tmpfile)
                                        json_data = json.loads(f.read())
                                        writeout = json.dumps(json_data)
                                        # close the file, delete temp files
                                        f.close()
                                        os.remove(tmpfile)
                                        os.remove("hazard_tmp.shp")
                                        os.remove("exposure_tmp.shp")
                                        # 6. Cache the serialized result
                                        if cache:
                                            redis_server.set(impact_name,
                                                             writeout)
                                            # use setex to add a cache expiry
                else:
                    writeout = 'Sorry, your query returned one or' + \
                        ' more empty matches'
    self.set_header('Content-Type', 'application/javascript')
    self.write(writeout)
def post(self):
    """Handle PDF report generation and impact calculation requests.

    The 'purpose' argument selects the action:
    * contains 'pdf' - render the posted HTML to DATA_PATH/pdf/report.pdf
    * contains 'calculate' - run the flood/building impact function on
      the posted hazard and exposure layers and render the HTML summary
      with the 'result.html' template.
    """
    result = None
    purpose = self.get_argument("purpose")
    if "pdf" in purpose:
        html = self.get_argument("html")
        output = os.path.join(DATA_PATH, 'pdf', 'report.pdf')
        # Context manager so the stylesheet handle cannot leak if
        # read() raises (the original closed it manually).
        with open(os.path.join(ROOT, 'static', 'css', 'pdf.css')) as data:
            css = data.read()
        HTML(string=html).write_pdf(output, stylesheets=[CSS(string=css)])
        return
    elif "calculate" in purpose:
        encoding = sys.getfilesystemencoding()
        exposure = self.get_argument("exposure")
        exposure_category = self.get_argument("exposure_category")
        exposure_subcategory = self.get_argument("exposure_subcategory")
        hazard = self.get_argument("hazard")
        hazard_category = self.get_argument("hazard_category")
        hazard_subcategory = self.get_argument("hazard_subcategory")
        try:
            hazard_layer = read_layer(hazard.encode(encoding))
            exposure_layer = read_layer(exposure.encode(encoding))

            # Assign the keywords required by the InaSAFE calculation.
            exposure_layer.keywords['category'] = exposure_category
            exposure_layer.keywords['subcategory'] = exposure_subcategory
            hazard_layer.keywords['category'] = hazard_category
            hazard_layer.keywords['subcategory'] = hazard_subcategory

            # TODO: pick the impact function from the keywords rather
            # than hard-coding the flood/building one.
            impact_function = FloodBuildingImpactFunction

            output = os.path.join(DATA_PATH, 'impact', 'impact.json')
            output_style = os.path.join(DATA_PATH, 'impact',
                                        'impact_style.json')
            output_summary = os.path.join(DATA_PATH, 'impact',
                                          'impact_summary.html')

            if os.path.exists(output) and os.path.exists(output_style) \
                    and os.path.exists(output_summary):
                # Cached artifacts from a previous run - reuse the
                # summary instead of recalculating.
                with open(output_summary) as summary:
                    result = summary.read()
            else:
                impact = calculate_impact(
                    layers=[exposure_layer, hazard_layer],
                    impact_fcn=impact_function)
                # Persist the style for the impact layer (redundant
                # close() inside the with block removed).
                with open(output_style, 'w') as style_json:
                    json.dump(impact.style_info, style_json)
                # Persist the impact summary HTML.
                result = impact.keywords["impact_summary"]
                with open(output_summary, 'w') as summary:
                    summary.write(result)
        except:
            # Re-raised so Tornado reports the full traceback; narrowing
            # this bare except is the eventual fix.
            print('IO Error or something else has occurred!')
            raise
        else:
            self.render("result.html", result=result)
def calculate(request, save_output=save_file_to_geonode):
    """Django view: run an impact calculation requested via HTTP POST.

    Expects POST fields impact_function, hazard_server, hazard,
    exposure_server, exposure, bbox and keywords.  Downloads the two
    layers, runs the selected impact function and uploads the result
    via ``save_output``.  A Calculation row records the run; on failure
    a JSON body with 'errors' and 'stacktrace' is returned.

    NOTE(review): no success-path HttpResponse is visible in this block;
    presumably the view continues further down in the original file --
    confirm against the full source.
    """
    start = datetime.datetime.now()
    if request.method == 'GET':
        # FIXME: Add a basic form here to be able to generate the POST request.
        return HttpResponse('This should be accessed by robots, not humans.'
                            'In other words using HTTP POST instead of GET.')
    elif request.method == 'POST':
        data = request.POST
        impact_function_name = data['impact_function']
        hazard_server = data['hazard_server']
        hazard_layer = data['hazard']
        exposure_server = data['exposure_server']
        exposure_layer = data['exposure']
        requested_bbox = data['bbox']
        keywords = data['keywords']  # NOTE(review): unused in this block

        # Attribute anonymous requests to a generated valid user so the
        # Calculation row always has an owner.
        if request.user.is_anonymous():
            theuser = get_valid_user()
        else:
            theuser = request.user

        # Create entry in database (success=False until the run completes)
        calculation = Calculation(user=theuser,
                                  run_date=start,
                                  hazard_server=hazard_server,
                                  hazard_layer=hazard_layer,
                                  exposure_server=exposure_server,
                                  exposure_layer=exposure_layer,
                                  impact_function=impact_function_name,
                                  success=False)

        # Wrap main computation loop in try except to catch and present
        # messages and stack traces in the application
        try:
            # Get metadata
            haz_metadata = get_metadata(hazard_server, hazard_layer)
            exp_metadata = get_metadata(exposure_server, exposure_layer)

            # Determine common resolution in case of raster layers
            raster_resolution = get_common_resolution(haz_metadata,
                                                      exp_metadata)

            # Get reconciled bounding boxes
            haz_bbox, exp_bbox, imp_bbox = get_bounding_boxes(
                haz_metadata, exp_metadata, requested_bbox)

            # Record layers to download
            download_layers = [(hazard_server, hazard_layer, haz_bbox),
                               (exposure_server, exposure_layer, exp_bbox)]

            # Add linked layers if any FIXME: STILL TODO!

            # Get selected impact function
            plugins = get_admissible_plugins()
            msg = ('Could not find "%s" in "%s"' % (impact_function_name,
                                                    plugins.keys()))
            assert impact_function_name in plugins, msg
            impact_function = plugins.get(impact_function_name)
            impact_function_source = inspect.getsource(impact_function)

            # Record information calculation object and save it
            calculation.impact_function_source = impact_function_source
            calculation.bbox = bboxlist2string(imp_bbox)
            calculation.save()

            # Start computation
            msg = 'Performing requested calculation'
            #logger.info(msg)

            # Download selected layer objects
            layers = []
            for server, layer_name, bbox in download_layers:
                msg = ('- Downloading layer %s from %s' % (layer_name,
                                                           server))
                #logger.info(msg)
                L = download(server, layer_name, bbox, raster_resolution)
                layers.append(L)

            # Calculate result using specified impact function
            msg = ('- Calculating impact using %s' % impact_function_name)
            #logger.info(msg)
            impact_file = calculate_impact(layers=layers,
                                           impact_fcn=impact_function)

            # Upload result to internal GeoServer
            msg = ('- Uploading impact layer %s' % impact_file.name)

            # Determine layer title for upload
            output_kw = impact_file.get_keywords()
            title = impact_file.get_name() + " using " + \
                output_kw['hazard_title'] + \
                " and " + output_kw['exposure_title']
            result = save_output(impact_file.filename,
                                 title=title,
                                 user=theuser,
                                 overwrite=False)
        except Exception, e:
            # FIXME: Reimplement error saving for calculation.
            # FIXME (Ole): Why should we reimplement?
            # This is dangerous. Try to raise an exception
            # e.g. in get_metadata_from_layer. Things will silently fail.
            # See issue #170
            #logger.error(e)
            errors = e.__str__()
            trace = exception_format(e)
            calculation.errors = errors
            calculation.stacktrace = trace
            calculation.save()
            jsondata = json.dumps({'errors': errors, 'stacktrace': trace})
            return HttpResponse(jsondata, mimetype='application/json')
def get(self):
    """Calculate the impact of hazard layer 'h' on exposure layer 'e'.

    Checks redis for a cached result first; on a miss it fetches both
    shapefile paths from postgres, clips the layers to their common
    bounding box with ogr2ogr, runs the first admissible impact
    function and writes the resulting GeoJSON back (caching it when
    redis is available).
    """
    exposure_id = self.get_argument('e')
    hazard_id = self.get_argument('h')
    impact_name = 'impact-e%s-h%s' % (exposure_id, hazard_id)
    # ROBUSTNESS FIX: several failure paths previously left 'writeout'
    # unbound, making the final self.write raise NameError.
    writeout = ''
    if exposure_id and hazard_id:
        # First check if the impact already exists in the cache
        try:
            # try to connect to the redis cache
            redis_server = redis.Redis()
            cache = True
            print('Successfully connected to redis!')
        except:
            # This is just a flag that will be used later on
            print("I couldn't connect to redis")
            cache = False
        else:
            # If the impact exists, get it from the cache and return
            if redis_server.exists(impact_name):
                print('Entry exists in cache!')
                writeout = redis_server.get(impact_name)
                self.set_header('Content-Type', 'application/javascript')
                self.write(writeout)
                return
        # Query the db and calculate if it doesn't
        try:
            # try connecting to the pg database
            conn = psycopg2.connect(
                "dbname='dev' user='******' password='******'")
            print('Successfully connected to postgres!')
        except:
            writeout = 'Could not connect to the database!'
        else:
            # create a cursor
            cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
            try:
                # 1. Query the db for the layers.
                # SECURITY FIX: the ids come straight from the request,
                # so bind them as query parameters instead of
                # interpolating them into the SQL string.
                query = 'SELECT shapefile FROM layers WHERE id = %s'
                cursor.execute(query, (exposure_id,))
                exposure = cursor.fetchone()
                cursor.execute(query, (hazard_id,))
                hazard = cursor.fetchone()
            except:
                writeout = 'There was something wrong with your query'
                conn.rollback()
            else:
                if exposure and hazard:
                    # Pass the shapefile (paths) to read_layer
                    try:
                        exposure_layer = read_layer(exposure['shapefile'])
                        hazard_layer = read_layer(hazard['shapefile'])
                    except:
                        # FIXME(review): execution continues after this
                        # failure and will NameError below; should
                        # return early instead.
                        writeout = 'Something went wrong when reading the layers'

                    # Keywords
                    exposure_dict = exposure_layer.get_keywords()
                    hazard_dict = hazard_layer.get_keywords()
                    if exposure_layer.is_vector:
                        exposure_dict['layertype'] = 'vector'
                    else:
                        exposure_dict['layertype'] = 'raster'
                    if hazard_layer.is_vector:
                        hazard_dict['layertype'] = 'vector'
                    else:
                        # BUG FIX: the original wrote the hazard raster
                        # flag into exposure_dict here.
                        hazard_dict['layertype'] = 'raster'

                    # get optimal bounding box
                    common_bbox = bbox_intersection(
                        exposure_layer.get_bounding_box(),
                        hazard_layer.get_bounding_box())
                    print(exposure_layer.get_bounding_box())
                    print(hazard_layer.get_bounding_box())
                    bbox_string = ''
                    try:
                        # bbox_intersection returns None for disjoint
                        # layers, which makes this loop raise.
                        for val in common_bbox:
                            bbox_string += str(val) + ' '
                    except:
                        writeout = 'The layers have no intersection!'
                    else:
                        # SECURITY/ROBUSTNESS FIX: run ogr2ogr without a
                        # shell; the bbox coordinates become separate
                        # argv entries exactly as the shell would have
                        # split them.
                        clip_args = bbox_string.split()
                        # gdal clip of the hazard layer
                        dest = 'hazard_tmp.shp'
                        src = hazard_layer.filename
                        print(src)
                        try:
                            call(['ogr2ogr', '-clipsrc'] + clip_args +
                                 [dest, src])
                        except:
                            print('could not clip hazard')
                        else:
                            print('created clipped hazard. Reading layer now.')
                            try:
                                clipped_hazard = read_layer("hazard_tmp.shp")
                            except:
                                print('something went wrong when reading the clipped hazard')
                            else:
                                print(clipped_hazard)
                                # gdal clip of the exposure layer
                                dest = 'exposure_tmp.shp'
                                src = exposure_layer.filename
                                print(src)
                                try:
                                    call(['ogr2ogr', '-clipsrc'] +
                                         clip_args + [dest, src])
                                except:
                                    print('could not clip exposure')
                                else:
                                    print('created clipped exposure. Reading layer now.')
                                    try:
                                        clipped_exposure = read_layer(
                                            "exposure_tmp.shp")
                                    except:
                                        print('something went wrong when reading the clipped exposure')
                                    else:
                                        print(clipped_exposure)
                                        # get impact function based on layer keywords
                                        fncs = get_admissible_plugins(
                                            [hazard_dict, exposure_dict])
                                        impact_fnc = fncs.values()[0]
                                        layers = [clipped_hazard,
                                                  clipped_exposure]
                                        # BUG FIX: the original passed the
                                        # undefined name 'impact_function'
                                        # here (NameError); the selected
                                        # plugin is 'impact_fnc'.
                                        impact_file = calculate_impact(
                                            layers, impact_fnc)
                                        tmpfile = 'tmp%s.json' % impact_name
                                        # 5. Serialize the output into json:
                                        # convert the impact file to GeoJSON
                                        call(['ogr2ogr', '-f', 'GeoJSON',
                                              tmpfile, impact_file.filename])
                                        # FIXME: Something needs to be done
                                        # about the encoding
                                        f = open(tmpfile)
                                        json_data = json.loads(f.read())
                                        writeout = json.dumps(json_data)
                                        # close the file, delete temp files
                                        f.close()
                                        os.remove(tmpfile)
                                        os.remove("hazard_tmp.shp")
                                        os.remove("exposure_tmp.shp")
                                        # 6. Cache the serialized result
                                        if cache:
                                            redis_server.set(impact_name,
                                                             writeout)
                                            # use setex to add a cache expiry
                else:
                    writeout = 'Sorry, your query returned one or' + \
                        ' more empty matches'
    self.set_header('Content-Type', 'application/javascript')
    self.write(writeout)
def get(self):
    """Run a flood impact calculation for the requested layers.

    The 'impact_function' argument selects the analysis: 'structure'
    uses the NOAH flood/building function on a shapefile exposure,
    'population' uses the evacuation function on a GeoTIFF exposure.
    Existing impact/summary files on disk are reused; otherwise the
    impact is computed, uploaded, styled and rendered to PDF.  The
    response is a JSON document describing the result.
    """
    data = dict()
    encoding = sys.getfilesystemencoding()

    hazard_name = "%s.shp" % self.get_argument("hazard_name")
    hazard_path = os.path.join(DATA_PATH, 'hazard', hazard_name)

    impact_function_keyword = self.get_argument("impact_function")
    exposure_name = ''
    if impact_function_keyword == 'structure':
        exposure_name = "%s.shp" % self.get_argument("exposure_name")
        #impact_function = FloodBuildingImpactFunction
        impact_function = NOAHFloodBuildingImpactFunction
    elif impact_function_keyword == 'population':
        exposure_name = "%s.tif" % self.get_argument("exposure_name")
        impact_function = FloodEvacuationFunctionVectorHazard
    exposure_path = os.path.join(DATA_PATH, 'exposure', exposure_name)

    try:
        hazard_layer = read_layer(hazard_path.encode(encoding))
        exposure_layer = read_layer(exposure_path.encode(encoding))

        # Hard-coded keywords required by the InaSAFE calculation.
        hazard_layer.keywords['category'] = 'hazard'
        hazard_layer.keywords['subcategory'] = 'flood'
        exposure_layer.keywords['category'] = 'exposure'
        if impact_function_keyword == 'structure':
            exposure_layer.keywords['subcategory'] = 'structure'
        elif impact_function_keyword == 'population':
            exposure_layer.keywords['subcategory'] = 'population'

        hazard_stem, ext = os.path.splitext(hazard_name)
        exposure_stem, ext = os.path.splitext(exposure_name)
        impact_base_name = "IMPACT_%s_%s" % (exposure_stem, hazard_stem)
        impact_filename = impact_base_name + '.shp'
        impact_summary = "IMPACT_%s_%s.html" % (exposure_stem, hazard_stem)
        output = str(os.path.join(DATA_PATH, 'impact', impact_filename))
        output_summary = str(
            os.path.join(DATA_PATH, 'impact summary', impact_summary))

        if os.path.exists(output) and os.path.exists(output_summary):
            # Both artifacts already exist on disk: reuse them.
            print('impact file and impact summary already exists!')
            data = {
                'return': 'success',
                'resource': impact_base_name,
            }
            with open(output_summary) as html:
                data['html'] = html.read()
                print_pdf(data['html'], impact_base_name)
                html.close()
        else:
            try:
                impact = calculate_impact(
                    layers=[exposure_layer, hazard_layer],
                    impact_fcn=impact_function)
                impact.write_to_file(output)
                data = upload_impact_vector(output)

                # Create the impact summary file.
                make_data_dirs()
                result = impact.keywords["impact_summary"]
                with open(output_summary, 'w+') as summary:
                    summary.write(result)
                    summary.close()

                # Population results get a generated style; building
                # results reuse the shared "Flood-Building" style.
                if impact_function_keyword == 'population':
                    make_style(impact_base_name, impact.style_info)
                    set_style(impact_base_name, impact_base_name)
                else:
                    set_style(impact_base_name, "Flood-Building")

                data['html'] = result
                print_pdf(result, impact_base_name)
            except:
                raise
    except:
        print('IO Error or something else has occurred!')
        raise
    else:
        self.set_header("Content-Type", "application/json")
        self.write(json.dumps(data))