def create_upload_session(self, upload_file):
    """ Creates an upload session from the file. """
    upload = UploadedData.objects.create(
        user=self.request.user, state='UPLOADED', complete=True)
    upload_file.upload = upload
    upload_file.save()
    upload.size = upload_file.file.size
    upload.name = upload_file.name
    upload.file_type = self.get_file_type(upload_file.file.path)
    upload.save()

    # If an S3 bucket is configured, copy the uploaded file there and record
    # its location as "<bucket>:<key>" in the upload metadata.
    bucket_name = getattr(settings, 'OSGEO_STORAGE_BUCKET_NAME', None)
    if bucket_name:
        conn = boto.connect_s3()
        bucket = conn.get_bucket(bucket_name)
        u = uuid.uuid4()
        k = Key(bucket)
        k.key = 'osgeo_importer/{}{}'.format(
            u, os.path.splitext(upload_file.file.path)[1])
        k.set_contents_from_filename(upload_file.file.path)
        conn.close()
        upload.metadata = '{}:{}'.format(bucket_name, k.key)
        upload.save()

    # Create an UploadLayer for each layer the inspector finds in the file.
    description = self.get_fields(upload_file.file.path)

    for layer in description:
        configuration_options = DEFAULT_LAYER_CONFIGURATION.copy()
        configuration_options.update({'index': layer.get('index')})

        upload.uploadlayer_set.add(
            UploadLayer(name=layer.get('name'),
                        fields=layer.get('fields', {}),
                        index=layer.get('index'),
                        feature_count=layer.get('feature_count'),
                        configuration_options=configuration_options))
    upload.save()

    return upload
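# Illustrative sketch (assumption, not part of the original module): the
# "<bucket>:<key>" string stored in upload.metadata above could later be split
# apart to fetch the S3 copy back using the same boto (v2) API that
# create_upload_session() uses. The local target path is hypothetical.
#
#   bucket_name, key_name = upload.metadata.split(':', 1)
#   conn = boto.connect_s3()
#   key = conn.get_bucket(bucket_name).get_key(key_name)
#   key.get_contents_to_filename('/tmp/restored_upload')
#   conn.close()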
def configure_upload(self, upload, files):
    """
    *upload*: new, unsaved UploadedData instance (from upload())
    *desc*:
        1. sets up the directory for the upload
        2. moves the files into the upload directory
        3. creates UploadFile & UploadLayer instances related to *upload*
    """
    from osgeo_importer.models import UploadFile, UploadLayer, DEFAULT_LAYER_CONFIGURATION
    upload.save()

    # Create the upload directory based on the upload's primary key.
    outpath = os.path.join('osgeo_importer_uploads', str(upload.pk))
    outdir = os.path.join(FileSystemStorage().location, outpath)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # Move all files to the upload directory. This must be done for all files
    # before saving any UploadFile, so that validation sees the final paths.
    finalfiles = []
    for each in files:
        tofile = os.path.join(outdir, os.path.basename(each.name))
        shutil.move(each.name, tofile)
        finalfiles.append(tofile)

    # Create UploadFile and UploadLayer instances for each moved file.
    upfiles = []
    # Collect any .sld style files so they can be attached to each layer's
    # configuration options.
    styles = [os.path.basename(x) for x in finalfiles if '.sld' in x.lower()]
    for each in finalfiles:
        upfile = UploadFile.objects.create(upload=upload)
        upfiles.append(upfile)
        upfile.file.name = each
        # Detect and store the file type for later reporting, since it is no
        # longer true that every upload has only one file type.
        try:
            upfile.file_type = self.get_file_type(each)
        except NoDataSourceFound:
            upfile.file_type = None
        upfile.save()

        upfile_basename = os.path.basename(each)
        _, upfile_ext = os.path.splitext(upfile_basename)
        # Skip shapefile sidecar files; only the .shp carries layers.
        if upfile_ext.lower() not in ['.prj', '.dbf', '.shx']:
            description = self.get_fields(each)
            for layer_desc in description:
                configuration_options = DEFAULT_LAYER_CONFIGURATION.copy()
                configuration_options.update({'index': layer_desc.get('index')})
                if styles:
                    configuration_options.update({'styles': styles})

                # layer_basename is the string to start the layer name with.
                # The inspector will use a full path to the file for .tif
                # layer names; we use just the basename of the path (no
                # modification if it's not a path).
                layer_basename = os.path.basename(layer_desc.get('layer_name') or '')
                if not layer_basename:
                    msg = ('No layer name provided by inspector, using'
                           ' name of file containing layer as layer_basename')
                    logger.error(msg)
                    layer_basename = os.path.basename(upfile.file.name)

                internal_layer_name = layer_basename
                # Use underscores in place of dots & spaces.
                layer_basename = re.sub('[. ]', '_', layer_basename)
                layer_name = self.uniquish_layer_name(layer_basename)
                with db.transaction.atomic():
                    while UploadLayer.objects.filter(name=layer_name).exists():
                        layer_name = self.uniquish_layer_name(layer_basename)

                    upload_layer = UploadLayer(
                        upload_file=upfile,
                        name=layer_name,
                        internal_layer_name=internal_layer_name,
                        layer_name=layer_name,
                        layer_type=layer_desc['layer_type'],
                        fields=layer_desc.get('fields', {}),
                        index=layer_desc.get('index'),
                        feature_count=layer_desc.get('feature_count', None),
                        configuration_options=configuration_options
                    )
                    # If we wait for upload.save(), we may introduce
                    # layer_name collisions.
                    upload_layer.save()
                upload.uploadlayer_set.add(upload_layer)

    upload.size = sum(upfile.file.size for upfile in upfiles)
    upload.complete = True
    upload.state = 'UPLOADED'
    upload.save()
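# Illustrative usage sketch (assumption, not part of the original module):
# configure_upload() expects an unsaved UploadedData plus file objects whose
# .name attribute points at files already written to local disk, so a caller
# might look roughly like this ('temp_paths' and the surrounding view wiring
# are hypothetical):
#
#   upload = UploadedData(user=request.user)
#   open_files = [open(path) for path in temp_paths]
#   try:
#       self.configure_upload(upload, open_files)
#   finally:
#       for f in open_files:
#           f.close()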