def __init__(self, collection, acquisition, dataset):
    """Find or create the database record for *dataset*.

    Builds a metadata dictionary for the dataset, then looks up an
    existing dataset record in the database. If none exists a new
    record is inserted; otherwise the record is checked for
    updatability and flagged for update.
    """
    self.collection = collection
    self.datacube = collection.datacube
    self.db = IngestDBWrapper(self.datacube.db_connection)
    dataset_key = collection.get_dataset_key(dataset)
    self.dataset_bands = collection.new_bands[dataset_key]
    self.dataset = dataset
    self.mdd = dataset.metadata_dict

    # Start with the metadata fields of interest, then add the
    # derived and renamed entries.
    self.dataset_dict = {field: self.mdd[field]
                         for field in self.DATASET_METADATA_FIELDS}
    self.dataset_dict['acquisition_id'] = acquisition.acquisition_id
    self.dataset_dict['crs'] = self.mdd['projection']
    self.dataset_dict['level_name'] = self.mdd['processing_level']
    self.dataset_dict['level_id'] = \
        self.db.get_level_id(self.dataset_dict['level_name'])

    self.dataset_dict['dataset_id'] = \
        self.db.get_dataset_id(self.dataset_dict)
    if self.dataset_dict['dataset_id'] is None:
        # No matching record: create a new dataset record in the database.
        self.dataset_dict['dataset_id'] = \
            self.db.insert_dataset_record(self.dataset_dict)
        self.needs_update = False
    else:
        # A record already exists: verify the old record can be updated.
        self.__check_update_ok()
        self.needs_update = True

    self.dataset_id = self.dataset_dict['dataset_id']
def update_tile_footprint(self):
    """Update the tile footprint entry in the database"""
    if self.db.tile_footprint_exists(self.tile_dict):
        return

    # We may need to create a new footprint record.
    extents = self.tile_contents.tile_extents
    footprint_dict = {
        'x_index': self.tile_footprint[0],
        'y_index': self.tile_footprint[1],
        'tile_type_id': self.tile_type_id,
        'x_min': extents[0],
        'y_min': extents[1],
        'x_max': extents[2],
        'y_max': extents[3],
        'bbox': 'Populate this within sql query?'
        }

    # Use an independent database connection so this transaction
    # commits separately from the main ingest work.
    my_db = IngestDBWrapper(self.datacube.create_connection())
    try:
        with self.collection.transaction(my_db):
            # Re-check within the new transaction before inserting.
            if not my_db.tile_footprint_exists(self.tile_dict):
                my_db.insert_tile_footprint(footprint_dict)
    except psycopg2.IntegrityError:
        # An IntegrityError means a concurrent process has already
        # inserted the footprint, so there is nothing left to do.
        pass
    finally:
        my_db.close()
def __init__(self, collection, dataset):
    """Find or create the acquisition record for *dataset*.

    Fills ``self.acquisition_dict`` from the dataset metadata plus
    database lookups, then fuzzily matches an existing acquisition
    record or inserts a new one, storing the resulting id in
    ``self.acquisition_id``.
    """
    self.collection = collection
    self.datacube = collection.datacube
    self.db = IngestDBWrapper(self.datacube.db_connection)
    self.acquisition_dict = {}
    # BUG FIX: this attribute was previously misspelled
    # 'acquisiton_id', so the real attribute did not exist until the
    # assignment further below.
    self.acquisition_id = None  # set below

    # Fill a dictionary with data for the acquisition.
    # Start with fields from the dataset metadata.
    for field in self.ACQUISITION_METADATA_FIELDS:
        self.acquisition_dict[field] = dataset.metadata_dict[field]

    # Next look up the satellite_id and sensor_id in the
    # database and fill these in.
    self.acquisition_dict['satellite_id'] = \
        self.db.get_satellite_id(self.acquisition_dict['satellite_tag'])
    self.acquisition_dict['sensor_id'] = \
        self.db.get_sensor_id(self.acquisition_dict['satellite_id'],
                              self.acquisition_dict['sensor_name'])

    # Finally look up the acquisition_id, or create a new record if it
    # does not exist, and fill it into the dictionary.
    self.acquisition_id = \
        self.db.get_acquisition_id_fuzzy(self.acquisition_dict)
    if self.acquisition_id is None:
        self.acquisition_id = \
            self.db.insert_acquisition_record(self.acquisition_dict)
    else:
        # Do we update the acquisition record here?
        pass
    self.acquisition_dict['acquisition_id'] = self.acquisition_id
def __init__(self, datacube):
    """Initialise the collection object."""
    self.datacube = datacube
    self.db = IngestDBWrapper(datacube.db_connection)
    self.new_bands = self.__reindex_bands(datacube.bands)
    self.transaction_stack = []

    # Per-process scratch directory for tiles awaiting commit.
    temp_dir = os.path.join(self.datacube.tile_root,
                            'ingest_temp',
                            self.datacube.process_id)
    self.temp_tile_directory = temp_dir
    create_directory(temp_dir)
def __init__(self, collection, dataset_record, tile_contents):
    # Create the database record for a single tile: build the tile
    # dictionary, ensure the footprint record exists, then insert a new
    # tile record (it is an error if one already exists).
    self.collection = collection
    self.datacube = collection.datacube
    self.dataset_record = dataset_record
    self.tile_contents = tile_contents
    self.tile_footprint = tile_contents.tile_footprint
    self.tile_type_id = tile_contents.tile_type_id
    # Set tile_class_id to pending.
    self.tile_class_id = TC_PENDING
    # Set tile_id, determined below from database query
    self.tile_id = None
    self.db = IngestDBWrapper(self.datacube.db_connection)

    # Fill a dictionary with data for the tile
    tile_dict = {}
    self.tile_dict = tile_dict
    tile_dict['x_index'] = self.tile_footprint[0]
    tile_dict['y_index'] = self.tile_footprint[1]
    tile_dict['tile_type_id'] = self.tile_type_id
    tile_dict['dataset_id'] = self.dataset_record.dataset_id
    # Store final destination in the 'tile_pathname' field
    tile_dict['tile_pathname'] = self.tile_contents.tile_output_path
    # NOTE(review): literal 1 here, while the attribute above uses
    # TC_PENDING — presumably TC_PENDING == 1; confirm and replace the
    # magic number with the constant if so.
    tile_dict['tile_class_id'] = 1
    # The physical file is currently in the temporary location
    tile_dict['tile_size'] = \
        get_file_size_mb(self.tile_contents
                         .temp_tile_output_path)

    # Make sure a footprint record exists before inserting the tile.
    self.update_tile_footprint()

    # Make the tile record entry on the database:
    self.tile_id = self.db.get_tile_id(tile_dict)
    if self.tile_id is None:
        self.tile_id = self.db.insert_tile_record(tile_dict)
    else:
        # If there was any existing tile corresponding to tile_dict then
        # it should already have been removed.
        raise AssertionError("Attempt to recreate an existing tile.")
    tile_dict['tile_id'] = self.tile_id