Example #1
def get_harvest_collection(delete=False,
                           late_deletion=False,
                           with_store=False,
                           already_stored=False):
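    """Build a PipelineFileCollection from the bad/empty/good test fixtures,
    assigning each a dummy dest_path and a publish type derived from the
    ``delete`` and ``with_store`` flags.
    """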
    pf_bad = PipelineFile(BAD_NC,
                          is_deletion=delete,
                          late_deletion=late_deletion)
    pf_empty = PipelineFile(EMPTY_NC,
                            is_deletion=delete,
                            late_deletion=late_deletion)
    pf_good = PipelineFile(GOOD_NC,
                           is_deletion=delete,
                           late_deletion=late_deletion)

    collection = PipelineFileCollection([pf_bad, pf_empty, pf_good])

    if with_store:
        publish_type = (PipelineFilePublishType.DELETE_UNHARVEST if delete
                        else PipelineFilePublishType.HARVEST_UPLOAD)
    else:
        publish_type = (PipelineFilePublishType.UNHARVEST_ONLY if delete
                        else PipelineFilePublishType.HARVEST_ONLY)

    for pipeline_file in collection:
        pipeline_file.is_stored = already_stored
        pipeline_file.dest_path = os.path.join(
            'DUMMY', os.path.basename(pipeline_file.src_path))
        pipeline_file.publish_type = publish_type

    return collection
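
A quick usage sketch for the helper above (the call and assertions below are illustrative additions, not part of the original source):

collection = get_harvest_collection(delete=True, with_store=True)
for pipeline_file in collection:
    # every member was flagged for removal from both storage and harvest DB
    assert pipeline_file.publish_type == PipelineFilePublishType.DELETE_UNHARVEST
    assert pipeline_file.dest_path.startswith('DUMMY')
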
Example #2
    def preprocess(self):
        """
        Mark for deletion any files found in the 'soop_trv_duplicate_url' WFS layer.
        """
        files_to_delete = self.state_query.query_wfs_urls_for_layer(
            'soop_trv_duplicate_url')

        for f in files_to_delete:
            file_to_delete = PipelineFile(
                os.path.basename(f),
                is_deletion=True,
                dest_path=f,
                file_update_callback=self._file_update_callback)
            file_to_delete.publish_type = PipelineFilePublishType.DELETE_UNHARVEST
            self.file_collection.add(file_to_delete)
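
Outside the handler, the same deletion pattern looks roughly like this (a minimal sketch; the import path and the placeholder URL are assumptions):

import os

from aodncore.pipeline import (PipelineFile, PipelineFileCollection,
                               PipelineFilePublishType)

# placeholder standing in for one URL returned by the WFS query
duplicate_url = 'IMOS/SOOP/SOOP-TRV/duplicate.nc'

file_to_delete = PipelineFile(os.path.basename(duplicate_url),
                              is_deletion=True,
                              dest_path=duplicate_url)
file_to_delete.publish_type = PipelineFilePublishType.DELETE_UNHARVEST

collection = PipelineFileCollection()
collection.add(file_to_delete)
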
Example #3
    def _cleanup_previous_version(self, product_filename):
        """Identify any previously published version(s) of the given product file and mark them for deletion.
        Ignores cases where the previous version has exactly the same file name, as this will simply be overwritten.

        :param product_filename: File name of the newly generated product
        """
        product_type = get_product_type(product_filename)
        for old_product_url in self.old_product_files.get(product_type, []):
            if os.path.basename(old_product_url) != product_filename:
                # Add the previous version as a "late deletion". It will be deleted during the handler's `publish`
                # step after (and only if) all new files have been successfully published.
                old_file = PipelineFile(
                    old_product_url,
                    dest_path=old_product_url,
                    is_deletion=True,
                    late_deletion=True,
                    file_update_callback=self._file_update_callback)
                old_file.publish_type = PipelineFilePublishType.DELETE_UNHARVEST
                self.file_collection.add(old_file)
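
For context, `self.old_product_files` is expected to map a product type to the URLs of previously published files. A hypothetical shape (the key and URLs below are illustrative only, assuming `get_product_type` resolves both file names to the same key):

old_product_files = {
    'gridded': ['IMOS/ANMN/gridded_product_20170101.nc'],
}
# _cleanup_previous_version('gridded_product_20180101.nc') would then mark
# the 2017 file for late deletion, because its basename differs from the
# newly generated product's file name.
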
Example #4
def get_notification_data():
    collection = PipelineFileCollection(PipelineFile(GOOD_NC))
    collection_headers, collection_data = collection.get_table_data()

    data = {
        'input_file': 'good.nc',
        'processing_result': 'HANDLER_SUCCESS',
        'handler_start_time': '2017-10-23 16:05',
        'checks': None,
        'collection_headers': collection_headers,
        'collection_data': collection_data,
        'error_details': '',
        'upload_dir': None
    }

    return data
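
A sketch of how the returned dict might be consumed, e.g. to render the collection table in a notification body (illustrative; assumes the table data is an iterable of header names plus an iterable of rows):

data = get_notification_data()
print('\t'.join(str(header) for header in data['collection_headers']))
for row in data['collection_data']:
    print('\t'.join(str(cell) for cell in row))
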
Example #5
    def test_dstg(self):
        preexisting_file = PipelineFileCollection()
        existing_file = PipelineFile(DSTG, dest_path=os.path.join(
            'Department_of_Defence/DSTG/slocum_glider/TalismanSaberB20130706/', os.path.basename(DSTG)))

        preexisting_file.update([existing_file])

        # set the files to UPLOAD_ONLY
        preexisting_file.set_publish_types(PipelineFilePublishType.UPLOAD_ONLY)

        # upload the 'preexisting_file' collection to the unit test's temporary upload location
        broker = get_storage_broker(self.config.pipeline_config['global']['upload_uri'])
        broker.upload(preexisting_file)

        # run the handler on the DSTG NetCDF file
        handler = self.run_handler(DSTG)

        f = handler.file_collection[0]
        self.assertEqual(f.publish_type, PipelineFilePublishType.HARVEST_UPLOAD)
        self.assertEqual(f.dest_path,
                         'Department_of_Defence/DSTG/slocum_glider/TalismanSaberB20130706/' + f.name)
        self.assertTrue(f.is_stored)
        self.assertTrue(f.is_harvested)
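
The pre-seeding pattern in this test generalises: upload a collection to the handler's configured upload location before running it, so the handler sees a previously published file. A standalone sketch (the import paths, the 'file://' URI and the local fixture path are all assumptions):

from aodncore.pipeline import (PipelineFile, PipelineFileCollection,
                               PipelineFilePublishType)
from aodncore.pipeline.storage import get_storage_broker

seed = PipelineFileCollection()
# the local fixture must exist on disk for the upload to succeed
seed.update([PipelineFile('/tmp/fixtures/existing.nc',
                          dest_path='Department_of_Defence/DSTG/existing.nc')])
seed.set_publish_types(PipelineFilePublishType.UPLOAD_ONLY)

broker = get_storage_broker('file:///tmp/unittest_upload')
broker.upload(seed)
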