Example #1
        def set_dataset_metadata(record, dataset_key):
            SQL = '''-- Attempt to insert dataset_metadata records
insert into dataset_metadata(
    dataset_type_id,
    dataset_id,
    metadata_xml
    )
select
  %(dataset_type_id)s,
  %(dataset_id)s,
  %(xml_text)s::xml
where not exists (
    select * from dataset_metadata
    where dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
        )
    and xml_is_well_formed(%(xml_text)s)
'''
            params = {'dataset_type_id': dataset_key[0],
                      'dataset_id': dataset_key[1],
                      'xml_text': record['xml_text']
                      }
        
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return
            
            self.database.submit_query(SQL, params)
Example #2
        def set_dataset_metadata(record, dataset_key):
            SQL = '''-- Attempt to insert dataset_metadata records
insert into dataset_metadata(
    dataset_type_id,
    dataset_id,
    metadata_xml
    )
select
  %(dataset_type_id)s,
  %(dataset_id)s,
  %(xml_text)s::xml
where not exists (
    select * from dataset_metadata
    where dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
        )
    and xml_is_well_formed(%(xml_text)s)
'''
            params = {
                'dataset_type_id': dataset_key[0],
                'dataset_id': dataset_key[1],
                'xml_text': record['xml_text']
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return

            self.database.submit_query(SQL, params)
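The insert ... select ... where not exists pattern above keeps the insert idempotent on PostgreSQL versions that predate insert ... on conflict (9.5). For reference, here is a self-contained sketch of the same pattern against a bare psycopg2 connection; the connection settings and parameter values are placeholders, not values from the original code:

import psycopg2

# Placeholder connection settings -- substitute real credentials
connection = psycopg2.connect(host='localhost', dbname='gdf', user='gdf_user', password='secret')
cursor = connection.cursor()

sql = '''insert into dataset_metadata(dataset_type_id, dataset_id, metadata_xml)
select %(dataset_type_id)s, %(dataset_id)s, %(xml_text)s::xml
where not exists (
    select 1 from dataset_metadata
    where dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
    )
    and xml_is_well_formed(%(xml_text)s)
'''
params = {'dataset_type_id': 1, 'dataset_id': 42, 'xml_text': '<metadata/>'}

print(cursor.mogrify(sql, params))  # The fully substituted SQL, as log_multiline records above
cursor.execute(sql, params)         # No-op when a matching row already exists
connection.commit()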
Example #3
        def get_dataset_key(record, observation_key):
            '''
            Function to write dataset record if required and return dataset key (tuple containing dataset_type_id & dataset_id)
            '''
            SQL = '''-- Attempt to insert a dataset record and return dataset_id
insert into dataset(
    dataset_type_id,
    dataset_id,
    observation_type_id,
    observation_id,
    dataset_location,
    creation_datetime
    )
select
    (select dataset_type_id from dataset_type where dataset_type_tag = %(dataset_type_tag)s),
    nextval('dataset_id_seq'::regclass),
    %(observation_type_id)s,
    %(observation_id)s,
    %(dataset_location)s,
    %(creation_datetime)s
where not exists (
    select dataset_id from dataset
    where observation_type_id = %(observation_type_id)s
        and observation_id = %(observation_id)s
        and dataset_location = %(dataset_location)s
    );

select dataset_type_id, dataset_id from dataset
where observation_type_id = %(observation_type_id)s
    and observation_id = %(observation_id)s
    and dataset_location = %(dataset_location)s
'''
            params = {
                'dataset_type_tag': 'PQ' if record['level_name'] == 'PQA' else record['level_name'],
                'observation_type_id': observation_key[0],
                'observation_id': observation_key[1],
                'dataset_location': record['dataset_path'],
                'creation_datetime': record['datetime_processed'].replace(tzinfo=pytz.UTC)  # Mark naive time as UTC
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return (None, None)  # Dummy key, consistent with the other get_* helpers

            dataset_id_result = self.database.submit_query(SQL, params)
            assert dataset_id_result.record_count == 1, '%d records retrieved for dataset_id query' % dataset_id_result.record_count
            return (dataset_id_result.field_values['dataset_type_id'][0],
                    dataset_id_result.field_values['dataset_id'][0])
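Note that the SQL string above contains two statements: a guarded insert followed by a select that returns the key whether or not the insert fired, so submit_query must surface the result set of the final statement. A sketch of the equivalent two-step flow against a plain psycopg2 cursor (the helper name is illustrative):

INSERT_DATASET_SQL = '''insert into dataset(
    dataset_type_id, dataset_id, observation_type_id,
    observation_id, dataset_location, creation_datetime)
select
    (select dataset_type_id from dataset_type where dataset_type_tag = %(dataset_type_tag)s),
    nextval('dataset_id_seq'::regclass),
    %(observation_type_id)s, %(observation_id)s, %(dataset_location)s, %(creation_datetime)s
where not exists (
    select dataset_id from dataset
    where observation_type_id = %(observation_type_id)s
        and observation_id = %(observation_id)s
        and dataset_location = %(dataset_location)s
    )'''

SELECT_DATASET_KEY_SQL = '''select dataset_type_id, dataset_id from dataset
where observation_type_id = %(observation_type_id)s
    and observation_id = %(observation_id)s
    and dataset_location = %(dataset_location)s'''

def fetch_dataset_key(cursor, params):
    cursor.execute(INSERT_DATASET_SQL, params)      # No-op if the dataset row already exists
    cursor.execute(SELECT_DATASET_KEY_SQL, params)  # Always returns the (possibly pre-existing) key
    rows = cursor.fetchall()
    assert len(rows) == 1, '%d records retrieved for dataset_id query' % len(rows)
    return rows[0]  # (dataset_type_id, dataset_id)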
Example #4
        def set_storage_dimension(storage_key, dimension_key,
                                  min_index_max_tuple):
            '''
            Function to write storage_dimension record if required
            '''
            SQL = '''-- Attempt to insert storage_dimension record
insert into storage_dimension(
    storage_type_id,
    storage_id,
    storage_version,
    domain_id,
    dimension_id,
    storage_dimension_index,
    storage_dimension_min,
    storage_dimension_max
    )
select
    %(storage_type_id)s,
    %(storage_id)s,
    %(storage_version)s,
    %(domain_id)s,
    %(dimension_id)s,
    %(storage_dimension_index)s,
    %(storage_dimension_min)s,
    %(storage_dimension_max)s
where not exists (
    select * from storage_dimension
    where storage_type_id = %(storage_type_id)s
        and storage_id = %(storage_id)s
        and storage_version = %(storage_version)s
        and domain_id = %(domain_id)s
        and dimension_id = %(dimension_id)s
    );
'''
            params = {
                'storage_type_id': storage_key[0],
                'storage_id': storage_key[1],
                'storage_version': storage_key[2],
                'domain_id': dimension_key[0],
                'dimension_id': dimension_key[1],
                'storage_dimension_min': min_index_max_tuple[0],
                'storage_dimension_index': min_index_max_tuple[1],
                'storage_dimension_max': min_index_max_tuple[2]
            }

            log_multiline(logger.debug, SQL, 'SQL', '\t')
            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return

            self.database.submit_query(SQL, params)
Example #5
        def get_observation_key(record):
            '''
            Function to write observation (acquisition) record if required and return observation ID (tuple containing observation_type_id and observation_id)
            '''
            SQL = '''-- Attempt to insert an observation record and return observation key
insert into observation(
    observation_type_id,
    observation_id,
    observation_start_datetime,
    observation_end_datetime,
    instrument_type_id,
    instrument_id
    )
select
    1, -- Optical Satellite
    nextval('observation_id_seq'::regclass),
    %(observation_start_datetime)s,
    %(observation_end_datetime)s,
    1, -- Passive Satellite-borne
    (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
where not exists (
    select observation_id from observation
    where observation_type_id = 1 -- Optical Satellite
    and instrument_type_id = 1 -- Passive Satellite-borne
    and instrument_id = (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
    and observation_start_datetime = %(observation_start_datetime)s
    and observation_end_datetime = %(observation_end_datetime)s
    );

select observation_type_id, observation_id from observation
where observation_type_id = 1 -- Optical Satellite
    and instrument_type_id = 1 -- Passive Satellite-borne
    and instrument_id = (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
    and observation_start_datetime = %(observation_start_datetime)s
    and observation_end_datetime = %(observation_end_datetime)s;
'''
            params = {
                'instrument_tag': record['sensor_name'],
                'observation_start_datetime': record['start_datetime'],
                'observation_end_datetime': record['end_datetime']
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return (None, None)

            observation_id_result = self.database.submit_query(SQL, params)
            assert observation_id_result.record_count == 1, '%d records retrieved for observation_id query' % observation_id_result.record_count
            return (
                observation_id_result.field_values['observation_type_id'][0],
                observation_id_result.field_values['observation_id'][0])
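On PostgreSQL 9.5 or later the same insert-if-absent behaviour can be written with on conflict do nothing, which, unlike the where not exists guard above, is also safe under concurrent writers. The 9.5+ server and a unique constraint covering the natural-key columns are assumptions, so treat this as a sketch only:

SQL = '''-- Assumes a unique constraint over the natural-key columns
insert into observation(
    observation_type_id,
    observation_id,
    observation_start_datetime,
    observation_end_datetime,
    instrument_type_id,
    instrument_id
    )
values (
    1, -- Optical Satellite
    nextval('observation_id_seq'::regclass),
    %(observation_start_datetime)s,
    %(observation_end_datetime)s,
    1, -- Passive Satellite-borne
    (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
    )
on conflict do nothing;
'''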
Example #6
        def get_storage_key(record, storage_unit_path):
            '''
            Function to write storage unit record if required and return storage unit key (tuple containing storage_type_id, storage_id & storage_version)
            '''
            SQL = '''-- Attempt to insert a storage record and return storage key 
insert into storage(
    storage_type_id,
    storage_id,
    storage_version,
    storage_location,
    md5_checksum,
    storage_bytes,
    spatial_footprint_id
    )  
select
    %(storage_type_id)s,
    nextval('storage_id_seq'::regclass),
    0, -- storage_version
    %(storage_location)s,
    NULL,
    NULL,
    NULL
where not exists (
    select storage_type_id, storage_id, storage_version from storage 
    where storage_type_id = %(storage_type_id)s
    and storage_location = %(storage_location)s
    );
            
select storage_type_id, storage_id, storage_version from storage
where storage_type_id = %(storage_type_id)s
    and storage_location = %(storage_location)s;
'''
            params = {
                'storage_type_id': self.storage_type_config['storage_type_id'],
                # Note: storage_indices is captured from the enclosing scope, not passed in
                'storage_location': self.get_storage_filename(self.storage_type, storage_indices)
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return (None, None, None)

            storage_id_result = self.database.submit_query(SQL, params)
            assert storage_id_result.record_count == 1, '%d records retrieved for storage_id query' % storage_id_result.record_count
            return (storage_id_result.field_values['storage_type_id'][0],
                    storage_id_result.field_values['storage_id'][0],
                    storage_id_result.field_values['storage_version'][0])
Example #7
        def set_storage_dimension(storage_key, dimension_key, min_index_max_tuple):
            '''
            Function to write storage_dimension record if required
            '''
            SQL = '''-- Attempt to insert storage_dimension record
insert into storage_dimension(
    storage_type_id,
    storage_id,
    storage_version,
    domain_id,
    dimension_id,
    storage_dimension_index,
    storage_dimension_min,
    storage_dimension_max
    )
select
    %(storage_type_id)s,
    %(storage_id)s,
    %(storage_version)s,
    %(domain_id)s,
    %(dimension_id)s,
    %(storage_dimension_index)s,
    %(storage_dimension_min)s,
    %(storage_dimension_max)s
where not exists (
    select * from storage_dimension
    where storage_type_id = %(storage_type_id)s
        and storage_id = %(storage_id)s
        and storage_version = %(storage_version)s
        and domain_id = %(domain_id)s
        and dimension_id = %(dimension_id)s
    );
'''
            params = {'storage_type_id': storage_key[0],
                      'storage_id': storage_key[1],
                      'storage_version': storage_key[2],
                      'domain_id': dimension_key[0],
                      'dimension_id': dimension_key[1],
                      'storage_dimension_min': min_index_max_tuple[0],
                      'storage_dimension_index': min_index_max_tuple[1],
                      'storage_dimension_max': min_index_max_tuple[2]
                      }

            log_multiline(logger.debug, SQL, 'SQL', '\t')
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return
            
            self.database.submit_query(SQL, params)
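Note the deliberate shuffle when min_index_max_tuple is mapped into the parameter dict: element 0 is the minimum, element 1 the indexing value and element 2 the maximum. A namedtuple makes that positional contract explicit at call sites; the type below is an illustrative addition, not part of the original code:

from collections import namedtuple

# Hypothetical helper type spelling out the (min, index, max) positional contract
DimensionRange = namedtuple('DimensionRange', ['min_value', 'index_value', 'max_value'])

dim_range = DimensionRange(min_value=140.0, index_value=142.5, max_value=145.0)
params_fragment = {
    'storage_dimension_min': dim_range.min_value,
    'storage_dimension_index': dim_range.index_value,
    'storage_dimension_max': dim_range.max_value,
}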
Example #8
        def get_observation_key(record):
            '''
            Function to write observation (acquisition) record if required and return observation ID (tuple containing observation_type_id and observation_id)
            '''
            SQL = '''-- Attempt to insert an observation record and return observation key
insert into observation(
    observation_type_id,
    observation_id,
    observation_start_datetime,
    observation_end_datetime,
    instrument_type_id,
    instrument_id
    )
select
    1, -- Optical Satellite
    nextval('observation_id_seq'::regclass),
    %(observation_start_datetime)s,
    %(observation_end_datetime)s,
    1, -- Passive Satellite-borne
    (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
where not exists (
    select observation_id from observation
    where observation_type_id = 1 -- Optical Satellite
    and instrument_type_id = 1 -- Passive Satellite-borne
    and instrument_id = (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
    and observation_start_datetime = %(observation_start_datetime)s
    and observation_end_datetime = %(observation_end_datetime)s
    );

select observation_type_id, observation_id from observation
where observation_type_id = 1 -- Optical Satellite
    and instrument_type_id = 1 -- Passive Satellite-borne
    and instrument_id = (select instrument_id from instrument where instrument_tag = %(instrument_tag)s)
    and observation_start_datetime = %(observation_start_datetime)s
    and observation_end_datetime = %(observation_end_datetime)s;
'''
            params = {'instrument_tag': record['sensor_name'],
                      'observation_start_datetime': record['start_datetime'],
                      'observation_end_datetime': record['end_datetime']
                      }
            
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return (None, None)
            
            observation_id_result = self.database.submit_query(SQL, params)
            assert observation_id_result.record_count == 1, '%d records retrieved for observation_id query' % observation_id_result.record_count
            return (observation_id_result.field_values['observation_type_id'][0], 
                    observation_id_result.field_values['observation_id'][0])
Example #9
        def set_dataset_dimensions(dataset_key, dimension_key,
                                   min_index_max_tuple):
            '''
            Function to write dataset_dimension record if required
            '''
            SQL = '''-- Attempt to insert dataset_dimension records
insert into dataset_dimension(
    dataset_type_id,
    dataset_id,
    domain_id,
    dimension_id,
    min_value,
    max_value,
    indexing_value
    )
select
  %(dataset_type_id)s,
  %(dataset_id)s,
  %(domain_id)s,
  %(dimension_id)s,
  %(min_value)s,
  %(max_value)s,
  %(indexing_value)s
where not exists (
    select * from dataset_dimension
    where dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
        and domain_id = %(domain_id)s
        and dimension_id = %(dimension_id)s
    );
'''
            params = {
                'dataset_type_id': dataset_key[0],
                'dataset_id': dataset_key[1],
                'domain_id': dimension_key[0],
                'dimension_id': dimension_key[1],
                'min_value': min_index_max_tuple[0],
                'indexing_value': min_index_max_tuple[1],
                'max_value': min_index_max_tuple[2]
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return

            self.database.submit_query(SQL, params)
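Each call above costs one round trip per dimension. psycopg2's executemany can at least centralise the loop (it still executes the statement once per parameter set); the helper below is a sketch, with the cursor argument and function name both illustrative:

def set_dataset_dimensions_batch(cursor, sql, dataset_key, dimension_ranges):
    '''Batch the guarded inserts for every dimension of one dataset.

    dimension_ranges maps (domain_id, dimension_id) -> (min, index, max),
    mirroring the per-call arguments used above.
    '''
    rows = [{'dataset_type_id': dataset_key[0],
             'dataset_id': dataset_key[1],
             'domain_id': domain_id,
             'dimension_id': dimension_id,
             'min_value': min_index_max[0],
             'indexing_value': min_index_max[1],
             'max_value': min_index_max[2]}
            for (domain_id, dimension_id), min_index_max in dimension_ranges.items()]
    cursor.executemany(sql, rows)  # One guarded insert per dimension row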
Example #10
        def get_dataset_key(record, observation_key):
            '''
            Function to write dataset record if required and return dataset key (tuple containing dataset_type_id & dataset_id)
            '''
            SQL = '''-- Attempt to insert a dataset record and return dataset_id
insert into dataset(
    dataset_type_id,
    dataset_id,
    observation_type_id,
    observation_id,
    dataset_location,
    creation_datetime
    )
select
    (select dataset_type_id from dataset_type where dataset_type_tag = %(dataset_type_tag)s),
    nextval('dataset_id_seq'::regclass),
    %(observation_type_id)s,
    %(observation_id)s,
    %(dataset_location)s,
    %(creation_datetime)s
where not exists (
    select dataset_id from dataset
    where observation_type_id = %(observation_type_id)s
        and observation_id = %(observation_id)s
        and dataset_location = %(dataset_location)s
    );

select dataset_type_id, dataset_id from dataset
where observation_type_id = %(observation_type_id)s
    and observation_id = %(observation_id)s
    and dataset_location = %(dataset_location)s
'''
            params = {'dataset_type_tag': 'PQ' if record['level_name'] == 'PQA' else record['level_name'],
                      'observation_type_id': observation_key[0],
                      'observation_id': observation_key[1],
                      'dataset_location': record['dataset_path'],
                      'creation_datetime': record['datetime_processed'].replace(tzinfo=pytz.UTC) # Mark naive time as UTC
                      }
            
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return (None, None)  # Dummy key, consistent with the other get_* helpers
            
            dataset_id_result = self.database.submit_query(SQL, params)
            assert dataset_id_result.record_count == 1, '%d records retrieved for dataset_id query' % dataset_id_result.record_count
            return (dataset_id_result.field_values['dataset_type_id'][0], 
                    dataset_id_result.field_values['dataset_id'][0])
Example #11
        def get_storage_key(record, storage_unit_path):
            '''
            Function to write storage unit record if required and return storage unit key (tuple containing storage_type_id, storage_id & storage_version)
            '''
            SQL = '''-- Attempt to insert a storage record and return storage key
insert into storage(
    storage_type_id,
    storage_id,
    storage_version,
    storage_location,
    md5_checksum,
    storage_bytes,
    spatial_footprint_id
    )  
select
    %(storage_type_id)s,
    nextval('storage_id_seq'::regclass),
    0, -- storage_version
    %(storage_location)s,
    NULL,
    NULL,
    NULL
where not exists (
    select storage_type_id, storage_id, storage_version from storage 
    where storage_type_id = %(storage_type_id)s
    and storage_location = %(storage_location)s
    );
            
select storage_type_id, storage_id, storage_version from storage
where storage_type_id = %(storage_type_id)s
    and storage_location = %(storage_location)s;
'''            
            params = {'storage_type_id': self.storage_type_config['storage_type_id'],
                      'storage_location': self.get_storage_filename(self.storage_type, storage_indices)
                      }
            
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return (None, None, None)
            
            storage_id_result = self.database.submit_query(SQL, params)
            assert storage_id_result.record_count == 1, '%d records retrieved for storage_id query' % storage_id_result.record_count
            return (storage_id_result.field_values['storage_type_id'][0], 
                    storage_id_result.field_values['storage_id'][0],
                    storage_id_result.field_values['storage_version'][0])
Example #12
        def set_dataset_dimensions(dataset_key, dimension_key, min_index_max_tuple):
            '''
            Function to write dataset_dimension record if required
            '''
            SQL = '''-- Attempt to insert dataset_dimension records
insert into dataset_dimension(
    dataset_type_id,
    dataset_id,
    domain_id,
    dimension_id,
    min_value,
    max_value,
    indexing_value
    )
select
  %(dataset_type_id)s,
  %(dataset_id)s,
  %(domain_id)s,
  %(dimension_id)s,
  %(min_value)s,
  %(max_value)s,
  %(indexing_value)s
where not exists (
    select * from dataset_dimension
    where dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
        and domain_id = %(domain_id)s
        and dimension_id = %(dimension_id)s
    );
'''
            params = {'dataset_type_id': dataset_key[0],
                      'dataset_id': dataset_key[1],
                      'domain_id': dimension_key[0],
                      'dimension_id': dimension_key[1],
                      'min_value': min_index_max_tuple[0],
                      'indexing_value': min_index_max_tuple[1],
                      'max_value': min_index_max_tuple[2]
                      }
            
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return
            
            self.database.submit_query(SQL, params)
Example #13
        def set_storage_dataset(storage_key, dataset_key):
            '''
            Function to write storage_dataset record if required
            '''
            SQL = '''-- Attempt to insert storage_dataset record
insert into storage_dataset(
    storage_type_id,
    storage_id,
    storage_version,
    dataset_type_id,
    dataset_id
    )
select
    %(storage_type_id)s,
    %(storage_id)s,
    %(storage_version)s,
    %(dataset_type_id)s,
    %(dataset_id)s
where not exists (
    select * from storage_dataset
    where storage_type_id = %(storage_type_id)s
        and storage_id = %(storage_id)s
        and storage_version = %(storage_version)s
        and dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
    );
'''
            params = {
                'storage_type_id': storage_key[0],
                'storage_id': storage_key[1],
                'storage_version': storage_key[2],
                'dataset_type_id': dataset_key[0],
                'dataset_id': dataset_key[1],
            }

            log_multiline(logger.debug,
                          self.database.default_cursor.mogrify(SQL, params),
                          'Mogrified SQL', '\t')

            if self.dryrun:
                return

            self.database.submit_query(SQL, params)
Example #14
        def set_storage_dataset(storage_key, dataset_key):
            '''
            Function to write storage_dataset record if required
            '''
            SQL = '''-- Attempt to insert storage_dataset record
insert into storage_dataset(
    storage_type_id,
    storage_id,
    storage_version,
    dataset_type_id,
    dataset_id
    )
select
    %(storage_type_id)s,
    %(storage_id)s,
    %(storage_version)s,
    %(dataset_type_id)s,
    %(dataset_id)s
where not exists (
    select * from storage_dataset
    where storage_type_id = %(storage_type_id)s
        and storage_id = %(storage_id)s
        and storage_version = %(storage_version)s
        and dataset_type_id = %(dataset_type_id)s
        and dataset_id = %(dataset_id)s
    );
'''
            params = {'storage_type_id': storage_key[0],
                      'storage_id': storage_key[1],
                      'storage_version': storage_key[2],
                      'dataset_type_id': dataset_key[0],
                      'dataset_id': dataset_key[1],
                      }
            
            log_multiline(logger.debug, self.database.default_cursor.mogrify(SQL, params), 'Mogrified SQL', '\t')
            
            if self.dryrun:
                return
            
            self.database.submit_query(SQL, params)
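Taken together, the closures form a small indexing pipeline: resolve the observation key, derive the dataset key from it, then attach metadata and link the dataset to its storage unit. A hypothetical driver using the functions defined above (the record and storage_key arguments come from the surrounding method, which is not shown here):

def index_one_dataset(record, storage_key):
    '''Hypothetical driver chaining the helpers defined above.'''
    observation_key = get_observation_key(record)            # Insert-if-absent, returns key
    dataset_key = get_dataset_key(record, observation_key)   # Likewise
    set_dataset_metadata(record, dataset_key)                # Idempotent guarded inserts from here on
    set_storage_dataset(storage_key, dataset_key)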
Example #15
class AGDC2GDF(GDF):
    DEFAULT_CONFIG_FILE = 'agdc2gdf_default.conf'  # N.B: Assumed to reside in code root directory
    ARG_DESCRIPTORS = {
        'xmin': {
            'short_flag': '-x1',
            'long_flag': '--xmin',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Minimum inclusive X (longitude) of spatial range to process'
        },
        'xmax': {
            'short_flag': '-x2',
            'long_flag': '--xmax',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Maximum inclusive X (longitude) of spatial range to process'
        },
        'ymin': {
            'short_flag': '-y1',
            'long_flag': '--ymin',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Minimum inclusive Y (latitude) of spatial range to process'
        },
        'ymax': {
            'short_flag': '-y2',
            'long_flag': '--ymax',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Maximum inclusive Y (latitude) of spatial range to process'
        },
        'tmin': {
            'short_flag': '-t1',
            'long_flag': '--tmin',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Minimum inclusive t (years) of temporal range to process'
        },
        'tmax': {
            'short_flag': '-t2',
            'long_flag': '--tmax',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Maximum inclusive t (years) of temporal range to process'
        },
        'storage_type': {
            'short_flag': '-st',
            'long_flag': '--storage_type',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'GDF storage type to populate'
        },
        'satellite': {
            'short_flag': '-sa',
            'long_flag': '--satellite',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'AGDC satellite to process'
        },
        'sensors': {
            'short_flag': '-se',
            'long_flag': '--sensors',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Comma-separated list of AGDC sensors to process'
        },
        'level': {
            'short_flag': '-l',
            'long_flag': '--level',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'AGDC processing level to process'
        },
        'temp_dir': {
            'short_flag': '-t',
            'long_flag': '--temp_dir',
            'default': None,
            'action': 'store',
            'const': None,
            'help': 'Temporary directory for AGDC2GDF operation'
        },
        'force': {
            'short_flag': '-f',
            'long_flag': '--force',
            'default': False,
            'action': 'store_const',
            'const': True,
            'help': 'Flag to force replacement of existing files'
        },
        'dryrun': {
            'short_flag': '-dr',
            'long_flag': '--dryrun',
            'default': False,
            'action': 'store_const',
            'const': True,
            'help': 'Flag to skip file writing and SQL query execution'
        },
    }
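
    # Each descriptor above carries exactly the keywords argparse needs, so a parser
    # can be built generically; the real work is done by the inherited
    # _get_command_line_params (not shown here). The static method below is an
    # illustrative sketch, not part of the original class.
    @staticmethod
    def _build_parser_sketch(arg_descriptors):
        import argparse
        parser = argparse.ArgumentParser()
        for name, descriptor in arg_descriptors.items():
            kwargs = {'dest': name,
                      'default': descriptor['default'],
                      'action': descriptor['action'],
                      'help': descriptor['help']}
            if descriptor['action'] == 'store_const':
                kwargs['const'] = descriptor['const']  # Only const-style actions take a const
            parser.add_argument(descriptor['short_flag'], descriptor['long_flag'], **kwargs)
        return parser
        # e.g. params = vars(AGDC2GDF._build_parser_sketch(AGDC2GDF.ARG_DESCRIPTORS).parse_args())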

    def __init__(self):
        '''Constructor for class AGDC2GDF
        '''
        self._code_root = os.path.abspath(
            os.path.dirname(__file__))  # Directory containing this module's code
        self._gdf_root = os.path.abspath(
            os.path.dirname(gdf.__file__))  # Directory containing the GDF package code

        # Create master configuration dict containing both command line and config_file parameters
        self._command_line_params = self._get_command_line_params(
            AGDC2GDF.ARG_DESCRIPTORS)

        self.dryrun = self._command_line_params['dryrun']

        agdc2gdf_config_file = self._command_line_params[
            'config_files'] or os.path.join(self._code_root,
                                            AGDC2GDF.DEFAULT_CONFIG_FILE)

        agdc2gdf_config_file_object = ConfigFile(agdc2gdf_config_file)

        # Comma separated list of GDF config files specified in master config file
        gdf_config_files_string = agdc2gdf_config_file_object.configuration[
            'gdf'].get('config_files') or os.path.join(self._gdf_root,
                                                       GDF.DEFAULT_CONFIG_FILE)

        # Create master GDF configuration dict containing both command line and config_file parameters
        self._configuration = self._get_config(gdf_config_files_string)

        self.temp_dir = self._command_line_params.get(
            'temp_dir'
        ) or agdc2gdf_config_file_object.configuration['agdc']['temp_dir']
        # Try to create temp & cache directories if they don't exist
        if not directory_writable(self.temp_dir):
            new_temp_dir = os.path.join(os.path.expanduser("~"), 'gdf', 'temp')
            logger.warning(
                'Unable to access temporary directory %s. Using %s instead.',
                self.temp_dir, new_temp_dir)
            self.temp_dir = new_temp_dir
            if not directory_writable(self.temp_dir):
                raise Exception('Unable to write to temporary directory %s' %
                                self.temp_dir)

        # Create master GDF database dict
        self._databases = self._get_dbs()

        self.force = self._command_line_params.get(
            'force'
        ) or agdc2gdf_config_file_object.configuration['agdc2gdf'].get('force')

        logger.debug("self._command_line_params.get('storage_type') = %s",
                     self._command_line_params.get('storage_type'))
        self.storage_type = self._command_line_params.get(
            'storage_type'
        ) or agdc2gdf_config_file_object.configuration['gdf']['storage_type']

        self.agdc_satellite = self._command_line_params.get(
            'satellite'
        ) or agdc2gdf_config_file_object.configuration['agdc']['satellite']

        self.agdc_sensors = self._command_line_params.get(
            'sensors'
        ) or agdc2gdf_config_file_object.configuration['agdc']['sensors']
        self.agdc_sensors = tuple(self.agdc_sensors.split(','))

        self.agdc_level = self._command_line_params.get(
            'level'
        ) or agdc2gdf_config_file_object.configuration['agdc']['level']

        # Read GDF storage configuration from databases
        self._storage_config = self._get_storage_config()
        self.storage_type_config = self._storage_config[self.storage_type]
        self.database = self._databases[self.storage_type_config['db_ref']]

        self.dimensions = self.storage_type_config['dimensions']  # This is used a lot

        # Set up AGDC stuff now
        agdc_config_dict = agdc2gdf_config_file_object.configuration['agdc']
        try:
            db_ref = agdc_config_dict['db_ref']
            host = agdc_config_dict['host']
            port = agdc_config_dict['port']
            dbname = agdc_config_dict['dbname']
            user = agdc_config_dict['user']
            password = agdc_config_dict['password']

            self.agdc_db = Database(
                db_ref=db_ref,
                host=host,
                port=port,
                dbname=dbname,
                user=user,
                password=password,
                keep_connection=False,  # Assume we don't want connections hanging around
                autocommit=True)

            self.agdc_db.submit_query('select 1 as test_field')  # Test DB connection
            logger.debug('Connected to database %s:%s for %s', host, dbname,
                         db_ref)
        except Exception as e:
            logger.error('Unable to connect to database for %s: %s', db_ref, e)
            raise

        # Set self.range_dict from either command line or config file values
        self.range_dict = {}
        for dimension in self.storage_type_config['dimensions']:
            min_value = int(
                self._command_line_params['%smin' % dimension.lower()]
                or agdc2gdf_config_file_object.configuration['agdc2gdf'][
                    '%smin' % dimension.lower()])
            max_value = int(
                self._command_line_params['%smax' % dimension.lower()]
                or agdc2gdf_config_file_object.configuration['agdc2gdf'][
                    '%smax' % dimension.lower()])
            self.range_dict[dimension] = (min_value, max_value)

        log_multiline(logger.debug, self.__dict__, 'AGDC2GDF.__dict__', '\t')
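
directory_writable above comes from the gdf utility module and is expected both to create the directory if needed and to report whether it can be written. A minimal stand-in honouring that contract (an assumption about the real implementation, which is not shown here):

import os

def directory_writable(path):
    '''Best-effort stand-in: ensure the directory exists, then test write access.'''
    try:
        if not os.path.isdir(path):
            os.makedirs(path)
        return os.access(path, os.W_OK)
    except OSError:
        return False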