def data_model_obj(db, db_models):
    """Create, persist and return a ``DataFieldsModel`` instance populated
    with one representative value per field type.

    Args:
        db: database fixture ensuring DB access is available (unused directly).
        db_models: module/namespace providing ``DataFieldsModel``.

    Returns:
        The saved ``DataFieldsModel`` instance, with ``file_field`` pointing
        at a small uploaded test file.
    """
    instance = db_models.DataFieldsModel(
        big_integer_field=1000,
        boolean_field=False,
        char_field='This a char field',
        date_field=datetime.today(),
        datetime_field=datetime.now(),
        decimal_field=Decimal('3.56'),
        email_field='*****@*****.**',
        float_field=1.45,
        integer_field=10,
        null_boolean_field=None,
        positive_integer_field=200000,
        positive_small_integer_field=10,
        small_integer_field=20,
        text_field="The text field value",
        time_field=datetime.now().time(),
        url_field="http://www.test.com",
        custom_field="custom field text",
        # NOTE(review): assumes the system temp dir is non-empty — an empty
        # temp dir would raise IndexError here. TODO: confirm acceptable.
        file_path_field=os.listdir(tempfile.gettempdir())[0],
    )
    # BUG FIX: the original did ``file_temp.write(file_temp.read(1))``,
    # reading from a freshly created (empty) temp file, so it always saved
    # an empty file. Write a known payload instead, and let the context
    # manager guarantee the handle is closed.
    with NamedTemporaryFile(delete=True) as file_temp:
        file_temp.write(b'test file content')
        file_temp.flush()
        # ``save`` copies the temp file's content into storage under the
        # name 'test_file'.
        instance.file_field.save('test_file', File(file_temp))
    instance.save()
    return instance
def getPGSQL(self, projection="EPSG:4326"):
    """Return this object's geometry as a PostgreSQL dump (SQL string).

    The GeoJSON representation is written to a temp file, converted to a
    PGDUMP ``.sql`` file via the external ``ogr2ogr`` tool, and the SQL
    text is read back and returned.

    Args:
        projection: CRS identifier forwarded to ``getGeoJSON``.

    Returns:
        str: the generated SQL dump.
    """
    # Write the GeoJSON to a temp file that ogr2ogr can read by name.
    # The context manager keeps it alive for the conversion and deletes
    # it afterwards.
    with NamedTemporaryFile(mode='w+', delete=True,
                            suffix='.geo.json') as tmp_json_file:
        tmp_json_file.write(self.getGeoJSON(projection))
        tmp_json_file.flush()

        # NamedTemporaryFile is used only to reserve a unique filename;
        # ogr2ogr creates the actual .sql file, which we delete manually
        # below.
        tmp_sql_file = NamedTemporaryFile(mode='w+', delete=True,
                                          suffix='.sql')
        tmp_sql_file_name = tmp_sql_file.name
        tmp_sql_file.close()

        # Use an argument list with no shell instead of os.system with
        # %-interpolation — avoids quoting/injection problems with the
        # temp file paths. check=False mirrors os.system, which ignored
        # the exit status.
        import subprocess
        subprocess.run(
            ['ogr2ogr', '-f', 'PGDUMP',
             tmp_sql_file_name, tmp_json_file.name,
             '-lco', 'SRID=4326',
             '-lco', 'SCHEMA=public',
             '-lco', 'EXTRACT_SCHEMA_FROM_LAYER_NAME=NO'],
            check=False,
        )

    # BUG FIX: the original reopened the .sql file and never closed it
    # (the close() call was commented out) — a leaked file handle, and
    # os.remove on an open file fails on Windows. Use a context manager
    # and guarantee removal even if reading fails.
    try:
        with open(tmp_sql_file_name) as sql_file:
            sql_str = sql_file.read()
    finally:
        os.remove(tmp_sql_file_name)
    return sql_str