Example 1
def create_import_contexts_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import contexts table')
    ctx_schema = import_util.read_schema_for_table('contexts')
    # Start from scratch
    db.create_table('import', 'contexts', ctx_schema, ignore_if_exists=False)

    ctx_file = opus_secrets.DICTIONARY_CONTEXTS_FILE
    rows = []
    try:
        with open(ctx_file, 'r') as csvfile:
            filereader = csv.reader(csvfile)
            for row in filereader:
                if len(row) != 3:
                    logger.log('error', f'Bad row in "{ctx_file}": {row}')
                    return False
                name, description, parent = row
                new_row = {
                    'name': name,
                    'description': description,
                    'parent': parent
                }
                rows.append(new_row)
    except IOError as e:
        logger.log('error', f'Failed to read {ctx_file}: {e.strerror}')
        return False

    db.insert_rows('import', 'contexts', rows)

    return True
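
For reference, a minimal sketch of the row format the length check above enforces: exactly three comma-separated fields per row (name, description, parent). The values below are hypothetical, not taken from the real contexts file.

import csv
import io

sample = ('OPUS_GENERAL,General OPUS search terms,PSDD\n'
          'HST_WFC3,Hubble WFC3 terms,OPUS_GENERAL\n')
for row in csv.reader(io.StringIO(sample)):
    assert len(row) == 3
    name, description, parent = row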
Example 2
def create_cart():
    # There's really no point in doing this as an import table first,
    # since we're just creating an empty table.
    db = impglobals.DATABASE
    if not db.table_exists('perm', 'obs_general'):
        # We can't create cart here, because it has a foreign key
        # constraint on obs_general. But we needed to have tried because we
        # need to be able to delete things from obs_general and have
        # cart be empty! Chicken and egg.
        # So what we do is check here to see if obs_general exists. If it does,
        # we can go ahead and remove and re-create cart. If it doesn't,
        # then we don't do anything right now but set a flag to say that we'll
        # try again at the end of the import.
        if impglobals.TRY_CART_LATER:
            # Oops! We've already been down this road once, and apparently the
            # creation of obs_general failed. So we can't do anything.
            import_util.log_error(
                'Unable to create "cart" table because "obs_general" doesn\'t exist'
            )
            return
        impglobals.TRY_CART_LATER = True
        import_util.log_warning(
            'Unable to create "cart" table because "obs_general" doesn\'t exist'
            + ' - Will try again later')
        return
    cart_schema = import_util.read_schema_for_table('cart')
    db.drop_table('perm', 'cart')
    db.create_table('perm', 'cart', cart_schema, ignore_if_exists=False)
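
The TRY_CART_LATER flag implies a second attempt at the end of the import; a minimal sketch of that retry hook, assuming driver code not shown here:

def retry_deferred_tables():
    # Hypothetical end-of-import hook: if create_cart() deferred itself
    # earlier because obs_general was missing, call it once more now that
    # obs_general should exist. The flag stays set, so a second failure
    # logs an error inside create_cart() instead of deferring forever.
    if impglobals.TRY_CART_LATER:
        create_cart()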
Example 3
def create_collections():
    # There's really no point in doing this as an import table first,
    # since we're just creating an empty table.
    db = impglobals.DATABASE
    if not db.table_exists('perm', 'obs_general'):
        # We can't create collections here, because it has a foreign key
        # constraint on obs_general. But we needed to have tried because we
        # need to be able to delete things from obs_general and have
        # collections be empty! Chicken and egg.
        # So what we do is check here to see if obs_general exists. If it does,
        # we can go ahead and remove and re-create collections. If it doesn't,
        # then we don't do anything right now but set a flag to say that we'll
        # try again at the end of the import.
        if impglobals.TRY_COLLECTIONS_LATER:
            # Oops! We've already been down this road once, and apparently the
            # creation of obs_general failed. So we can't do anything.
            impglobals.LOGGER.log('error',
                'Unable to create "collections" table because '
                '"obs_general" doesn\'t exist')
            return
        impglobals.TRY_COLLECTIONS_LATER = True
        impglobals.LOGGER.log('warning',
            'Unable to create "collections" table because '
            '"obs_general" doesn\'t exist - Will try again later')
        return
    collections_schema = import_util.read_schema_for_table('collections')
    db.drop_table('perm', 'collections')
    db.create_table('perm', 'collections', collections_schema,
                    ignore_if_exists=False)
Example 4
def create_import_grouping_target_name_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import grouping_target_name table')
    grouping_target_name_schema = import_util.read_schema_for_table(
        'grouping_target_name')
    # Start from scratch
    db.drop_table('import', 'grouping_target_name')
    db.create_table('import',
                    'grouping_target_name',
                    grouping_target_name_schema,
                    ignore_if_exists=False)

    rows = []
    disp_order = 0

    for abbrev, planet in [('VEN', 'Venus'), ('EAR', 'Earth'), ('MAR', 'Mars'),
                           ('JUP', 'Jupiter'), ('SAT', 'Saturn'),
                           ('URA', 'Uranus'), ('NEP', 'Neptune'),
                           ('PLU', 'Pluto'), ('OTHER', 'Other'),
                           (None, 'NULL')]:
        entry = {
            'value': abbrev,
            'label': planet,
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    db.insert_rows('import', 'grouping_target_name', rows)
Example 5
def copy_dictionary_from_import_to_permanent():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Copying contexts table from import to permanent')
    # Start from scratch
    ctx_schema = import_util.read_schema_for_table('contexts')
    db.drop_table('perm', 'definitions')
    db.drop_table('perm', 'contexts')
    db.create_table('perm', 'contexts', ctx_schema, ignore_if_exists=False)

    db.copy_rows_between_namespaces('import', 'perm', 'contexts')

    logger.log('info', 'Copying definitions table from import to permanent')
    # Start from scratch
    def_schema = import_util.read_schema_for_table('definitions')
    db.create_table('perm', 'definitions', def_schema, ignore_if_exists=False)

    db.copy_rows_between_namespaces('import', 'perm', 'definitions')
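
A note on the namespace arguments: copy_rows_between_namespaces presumably resolves each namespace to a concrete table name and bulk-copies rows. A sketch of that assumption (the prefix scheme and the db.execute call are guesses, not part of this code):

def copy_rows_between_namespaces_sketch(db, src_ns, dst_ns, table):
    # Hypothetical equivalent: copy every row from the source table into
    # the freshly created, empty destination table in one statement.
    src = f'{src_ns}_{table}'   # e.g. 'import_contexts' (assumed naming)
    dst = f'{dst_ns}_{table}'   # e.g. 'perm_contexts'   (assumed naming)
    db.execute(f'INSERT INTO {dst} SELECT * FROM {src}')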
Example 6
def drop_cache_tables():
    impglobals.LOGGER.log('info', 'Dropping cache tables')
    table_names = impglobals.DATABASE.table_names('all', prefix='cache_')
    for table_name in table_names:
        impglobals.DATABASE.drop_table('all', table_name)

    user_search_schema = import_util.read_schema_for_table('user_searches')
    impglobals.DATABASE.drop_table('perm', 'user_searches')
    impglobals.DATABASE.create_table('perm', 'user_searches',
                                     user_search_schema)
Example 7
def copy_param_info_from_import_to_permanent():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Copying param_info table from import to permanent')
    # Start from scratch
    pi_schema = import_util.read_schema_for_table('param_info')
    db.drop_table('perm', 'param_info')
    db.create_table('perm', 'param_info', pi_schema, ignore_if_exists=False)

    db.copy_rows_between_namespaces('import', 'perm', 'param_info')
Example 8
def update_mult_info():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    # Find all the permanent mult_ tables
    table_names = db.table_names('perm', prefix='mult_')

    for table_name in table_names:
        # Try to figure out which parent table this mult table belongs to
        splits = table_name.split('_')
        table_schema = None
        for n_splits in range(3, 6):
            # Covers mult_obs_general to mult_obs_mission_new_horizons
            trial_name = '_'.join(splits[1:n_splits])
            table_schema = import_util.read_schema_for_table(trial_name)
            if table_schema is not None:
                break
        if table_schema is None:
            logger.log('error',
                       f'Unable to find table schema for mult "{table_name}"')
            continue
        mult_field_name = '_'.join(splits[n_splits:])
        for column in table_schema:
            field_name = column['field_name']
            if field_name == mult_field_name:
                break
        else:
            logger.log(
                'error',
                f'Unable to find field "{mult_field_name}" in table "{table_name}"'
            )
            continue

        mult_options = column.get('mult_options', False)
        if not mult_options:
            continue

        for mult_info in mult_options:
            id_num, value, label, disp_order, display, definition = mult_info

            row_dict = {
                'label': str(label),
                'disp_order': disp_order,
                'display': display
            }

            db.update_row('perm', table_name, row_dict, 'id=' + str(id_num))
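
A worked example of the name parsing above, since it is the least obvious step: the mult table name embeds both the parent table and the field name, and the n_splits loop discovers the boundary by asking which prefix is a real table. The table name here is hypothetical and read_schema_for_table is stubbed with a set:

splits = 'mult_obs_general_planet_id'.split('_')
# splits == ['mult', 'obs', 'general', 'planet', 'id']
known_tables = {'obs_general'}    # stand-in for read_schema_for_table()

for n_splits in range(3, 6):
    trial_name = '_'.join(splits[1:n_splits])   # 'obs_general' at n_splits=3
    if trial_name in known_tables:
        break
mult_field_name = '_'.join(splits[n_splits:])   # 'planet_id'
assert (trial_name, mult_field_name) == ('obs_general', 'planet_id')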
Example 9
def create_import_table_names_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import table_names table')
    table_names_schema = import_util.read_schema_for_table('table_names')
    # Start from scratch
    db.drop_table('import', 'table_names')
    db.create_table('import',
                    'table_names',
                    table_names_schema,
                    ignore_if_exists=False)

    # We use the entries in data_config to determine what
    # goes into table_names. The order we do things here matters because we're
    # creating disp_order as we go. This determines the order in which things
    # are displayed on the Details tab.

    rows = []
    disp_order = 0

    # obs_general first
    entry = {
        'table_name': 'obs_general',
        'label': 'General Constraints',
        'display': 'Y',
        'disp_order': disp_order
    }
    disp_order += 1
    rows.append(entry)

    # Then various random tables
    if impglobals.DATABASE.table_exists('perm', 'obs_pds'):
        entry = {
            'table_name': 'obs_pds',
            'label': 'PDS Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_type_image'):
        entry = {
            'table_name': 'obs_type_image',
            'label': 'Image Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_wavelength'):
        entry = {
            'table_name': 'obs_wavelength',
            'label': 'Wavelength Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_profile'):
        entry = {
            'table_name': 'obs_profile',
            'label': 'Occultation/Reflectance Profiles Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_surface_geometry_name'):
        entry = {
            'table_name': 'obs_surface_geometry_name',
            'label': 'Surface Geometry Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_surface_geometry'):
        entry = {
            'table_name': 'obs_surface_geometry',
            'label': 'Surface Geometry Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    surface_geo_table_names = impglobals.DATABASE.table_names(
        'perm', prefix='obs_surface_geometry__')
    for table_name in sorted(surface_geo_table_names):
        target_name = table_name.replace('obs_surface_geometry__', '')
        target_name = import_util.decode_target_name(target_name).title()
        entry = {
            'table_name': table_name,
            'label': target_name + ' Surface Geometry Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_ring_geometry'):
        entry = {
            'table_name': 'obs_ring_geometry',
            'label': 'Ring Geometry Constraints',
            'display': 'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    # Then missions
    for mission_id in sorted(MISSION_ID_TO_MISSION_TABLE_SFX.keys()):
        table_name = ('obs_mission_' +
                      MISSION_ID_TO_MISSION_TABLE_SFX[mission_id])
        if impglobals.DATABASE.table_exists('perm', table_name):
            entry = {
                'table_name': table_name,
                'label': (MISSION_ID_TO_MISSION_NAME[mission_id] +
                          ' Mission Constraints'),
                'display': 'Y',
                'disp_order': disp_order
            }
            disp_order += 1
            rows.append(entry)

    # Then instruments
    for instrument_id in sorted(INSTRUMENT_ID_TO_MISSION_ID.keys()):
        display = 'Y'
        if instrument_id[:3] == 'HST':
            # This is a hack because we don't actually have HST instrument
            # tables, but instead put everything in the mission tables
            display = 'N'
        table_name = 'obs_instrument_' + instrument_id.lower()
        if impglobals.DATABASE.table_exists('perm', table_name):
            entry = {
                'table_name': table_name,
                'label': (INSTRUMENT_ID_TO_INSTRUMENT_NAME[instrument_id] +
                          ' Constraints'),
                'display': display,
                'disp_order': disp_order
            }
            disp_order += 1
            rows.append(entry)

    db.insert_rows('import', 'table_names', rows)
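
The build-an-entry-then-bump-disp_order pattern repeats a dozen times above; a sketch of how it could be factored (the helper is ours, not the importer's):

def _append_table_entry(rows, table_name, label, display='Y'):
    # Hypothetical helper: disp_order equals the current row count, so
    # display order follows insertion order exactly as the hand-rolled
    # version does with its explicit counter.
    rows.append({'table_name': table_name, 'label': label,
                 'display': display, 'disp_order': len(rows)})

# Usage sketch:
rows = []
_append_table_entry(rows, 'obs_general', 'General Constraints')
if impglobals.DATABASE.table_exists('perm', 'obs_pds'):
    _append_table_entry(rows, 'obs_pds', 'PDS Constraints')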
Example 10
def create_import_definitions_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import definitions table')
    def_schema = import_util.read_schema_for_table('definitions')
    # Start from scratch
    db.create_table('import', 'definitions', def_schema, ignore_if_exists=False)

    bad_db = False

    pds_file = opus_secrets.DICTIONARY_PDSDD_FILE
    json_schema_path = opus_secrets.DICTIONARY_JSON_SCHEMA_PATH + '/obs*.json'
    json_list = glob.glob(json_schema_path)

    rows = []

    logger.log('info', f'Importing {pds_file}')

    context = 'PSDD'
    try:
        label = pdsparser.PdsLabel.from_file(pds_file)
    except IOError as e:
        logger.log('error', f'Failed to read {pds_file}: {e.strerror}')
        bad_db = True
    else:
        for item in range(len(label)):
            term = str(label[item]['NAME']).rstrip('\r\n')
            try:
                definition = ' '.join(str(label[item]['DESCRIPTION']).split())
            except KeyError:
                logger.log('warning',
                           f'No description for item {item}: "{term}"')
                continue
            new_row = {
                'term': term,
                'context': context,
                'definition': definition
            }
            rows.append(new_row)

    for file_path in json_list:
        file_name = os.path.basename(file_path)
        file_name = os.path.splitext(file_name)[0]
        logger.log('info', f'Importing {file_name}')
        schema = import_util.read_schema_for_table(file_name)
        for column in schema:
            if 'definition' in column:
                definition = column['definition']
                if column.get('pi_dict_name', None) is None:
                    logger.log('error',
                        f'Missing term for "{definition}" in "{file_name}"')
                    bad_db = True
                    continue
                term = column['pi_dict_name']
                if column.get('pi_dict_context', None) is None:
                    logger.log('error',
                        f'Missing context for "{definition}" in "{file_name}"')
                    bad_db = True
                    continue
                context = column['pi_dict_context']

                new_row = {
                    'term': term,
                    'context': context,
                    'definition': definition
                }
                rows.append(new_row)

    if bad_db:
        return False

    db.insert_rows('import', 'definitions', rows)

    return True
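
For context, the PdsLabel loop above assumes each top-level dictionary entry carries a NAME and, usually, a DESCRIPTION. An illustrative fragment of that structure (invented for illustration, not copied from the real PDSDD file):

# OBJECT               = ELEMENT_DEFINITION
#   NAME               = INSTRUMENT_ID
#   DESCRIPTION        = "An abbreviated name or acronym which
#                         identifies an instrument."
# END_OBJECT           = ELEMENT_DEFINITION
#
# label[item]['NAME'] yields the term; a missing DESCRIPTION raises
# KeyError, which the loop logs as a warning and then skips the entry.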
Example 11
def create_import_partables_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import partables table')
    partables_schema = import_util.read_schema_for_table('partables')
    # Start from scratch
    db.drop_table('import', 'partables')
    db.create_table('import', 'partables', partables_schema,
                    ignore_if_exists=False)

    # We use the entries in data_config to determine the first part of what
    # goes into partables.

    # For obs_general, we care about mission_id, inst_host_id, instrument_id,
    # and type_id.

    rows = []

    for mission_abbrev in sorted(MISSION_ABBREV_TO_MISSION_TABLE_SFX.keys()):
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'mission_id',
            'trigger_val': mission_abbrev,
            'partable':    ('obs_mission_'+
                            MISSION_ABBREV_TO_MISSION_TABLE_SFX[mission_abbrev])
        }
        rows.append(entry)

    for instrument_id in sorted(INSTRUMENT_ABBREV_TO_MISSION_ABBREV.keys()):
        partable = 'obs_instrument_'+instrument_id.lower()
        if instrument_id[:3] == 'HST':
            # This is a hack because we don't actually have HST instrument
            # tables, but instead put everything in the mission tables
            partable = 'obs_mission_hubble'
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'instrument_id',
            'trigger_val': instrument_id,
            'partable':    partable
        }
        rows.append(entry)

    for inst_host_id in sorted(INST_HOST_ABBREV_TO_MISSION_ABBREV.keys()):
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'inst_host_id',
            'trigger_val': inst_host_id,
            'partable':    ('obs_mission_'+
                            MISSION_ABBREV_TO_MISSION_TABLE_SFX[
                                INST_HOST_ABBREV_TO_MISSION_ABBREV[
                                    inst_host_id]])
        }
        rows.append(entry)

    # We don't need this anymore because Image Constraints are now permanently
    # displayed
    # entry = {
    #     'trigger_tab': 'obs_general',
    #     'trigger_col': 'data_type',
    #     'trigger_val': 'IMG',
    #     'partable':    'obs_type_image'
    # }
    # rows.append(entry)

    surface_geo_table_names = impglobals.DATABASE.table_names(
                                            'perm',
                                            prefix='obs_surface_geometry__')
    for table_name in sorted(surface_geo_table_names):
        target_name = table_name.replace('obs_surface_geometry__', '')
        entry = {
            'trigger_tab': 'obs_surface_geometry',
            'trigger_col': 'target_name',
            'trigger_val': import_util.decode_target_name(target_name).upper(),
            'partable':    table_name
        }
        rows.append(entry)

    db.insert_rows('import', 'partables', rows)
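
Each partables row acts as a trigger: when the named column of the trigger table takes the given value, the associated partable applies. A minimal sketch of a consumer-side match (hypothetical; this module only writes the rows):

def matching_partables(rows, table, column, value):
    # Hypothetical consumer: return every partable whose trigger matches
    # the observed value of (table, column).
    return [r['partable'] for r in rows
            if r['trigger_tab'] == table and
               r['trigger_col'] == column and
               r['trigger_val'] == value]

# e.g. matching_partables(rows, 'obs_general', 'instrument_id', 'COISS')
# would yield ['obs_instrument_coiss'] if that trigger row was added.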
Example 12
def create_import_table_names_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import table_names table')
    table_names_schema = import_util.read_schema_for_table('table_names')
    # Start from scratch
    db.drop_table('import', 'table_names')
    db.create_table('import', 'table_names', table_names_schema,
                    ignore_if_exists=False)

    # We use the entries in data_config to determine what
    # goes into table_names. The order we do things here matters because we're
    # creating disp_order as we go. This determines the order in which things
    # are displayed on the Details tab.

    rows = []
    disp_order = 0

    # obs_general first
    entry = {
        'table_name': 'obs_general',
        'label':      'General Constraints',
        'display':    'Y',
        'disp_order': disp_order
    }
    disp_order += 1
    rows.append(entry)

    # Then various random tables
    if impglobals.DATABASE.table_exists('perm', 'obs_pds'):
        entry = {
            'table_name': 'obs_pds',
            'label':      'PDS Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_type_image'):
        entry = {
            'table_name': 'obs_type_image',
            'label':      'Image Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_wavelength'):
        entry = {
            'table_name': 'obs_wavelength',
            'label':      'Wavelength Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_surface_geometry'):
        entry = {
            'table_name': 'obs_surface_geometry',
            'label':      'Surface Geometry Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    surface_geo_table_names = impglobals.DATABASE.table_names(
                                            'perm',
                                            prefix='obs_surface_geometry__')
    for table_name in sorted(surface_geo_table_names):
        target_name = table_name.replace('obs_surface_geometry__', '')
        target_name = import_util.decode_target_name(target_name).title()
        entry = {
            'table_name': table_name,
            'label':      target_name + ' Surface Geometry Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    if impglobals.DATABASE.table_exists('perm', 'obs_ring_geometry'):
        entry = {
            'table_name': 'obs_ring_geometry',
            'label':      'Ring Geometry Constraints',
            'display':    'Y',
            'disp_order': disp_order
        }
        disp_order += 1
        rows.append(entry)

    # Then missions
    for mission_abbrev in sorted(MISSION_ABBREV_TO_MISSION_TABLE_SFX.keys()):
        table_name = ('obs_mission_' +
                      MISSION_ABBREV_TO_MISSION_TABLE_SFX[mission_abbrev])
        if impglobals.DATABASE.table_exists('perm', table_name):
            entry = {
                'table_name': table_name,
                'label':      (MISSION_ABBREV_TO_MISSION_NAME[mission_abbrev] +
                               ' Mission Constraints'),
                'display':    'Y',
                'disp_order': disp_order
            }
            disp_order += 1
            rows.append(entry)

    # Then instruments
    for instrument_id in sorted(INSTRUMENT_ABBREV_TO_MISSION_ABBREV.keys()):
        display = 'Y'
        if instrument_id[:3] == 'HST':
            # This is a hack because we don't actually have HST instrument
            # tables, but instead put everything in the mission tables
            display = 'N'
        table_name = 'obs_instrument_'+instrument_id.lower()
        if impglobals.DATABASE.table_exists('perm', table_name):
            entry = {
                'table_name': table_name,
                'label':      (INSTRUMENT_ABBREV_TO_INSTRUMENT_NAME[instrument_id]+
                               ' Constraints'),
                'display':    display,
                'disp_order': disp_order
            }
            disp_order += 1
            rows.append(entry)

    db.insert_rows('import', 'table_names', rows)
Example 13
def create_import_param_info_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import param_info table')
    pi_schema = import_util.read_schema_for_table('param_info')
    # Start from scratch
    db.drop_table('import', 'param_info')
    db.create_table('import', 'param_info', pi_schema, ignore_if_exists=False)

    # We use the permanent tables to determine what goes into param_info
    table_names = db.table_names('perm', prefix='obs_')

    # read json file for ranges info
    ranges_filename = os.path.join('table_schemas', 'param_info_ranges.json')
    with open(ranges_filename, 'r') as fp:
        try:
            # read contents (str) and convert it to a json object (dict)
            contents = fp.read()
            ranges_json = json.loads(contents)
        except json.decoder.JSONDecodeError:
            logger.log('debug',
                       f'Was reading ranges json file "{ranges_filename}"')
            raise

    rows = []
    for table_name in table_names:
        table_schema = import_util.read_schema_for_table(table_name)
        if table_schema is None:
            logger.log('error',
                       f'Unable to read table schema for "{table_name}"')
            return False
        for column in table_schema:
            category_name = column.get('pi_category_name', None)
            if category_name is None:
                continue
            field_name = column.get('field_name', None)
            form_type_str = column.get('pi_form_type', None)
            (form_type, form_type_format,
             form_type_unit_id) = parse_form_type(form_type_str)
            if form_type_unit_id and not is_valid_unit_id(form_type_unit_id):
                logger.log(
                    'error', f'"{form_type_unit_id}" ' +
                    f'in "{category_name}/{field_name}" is not ' +
                    'a valid unit')
                return False
            # if pi_ranges exists in .json, get the corresponding ranges info
            # from dict and convert it to str before storing to database
            ranges = column.get('pi_ranges', None)
            if ranges:
                if ranges in ranges_json:
                    ranges = ranges_json[ranges]
                    ranges = json.dumps(ranges)
                else:
                    logger.log(
                        'error',
                        f'pi_ranges: "{ranges}" is not in "{ranges_filename}"')
                    return False

            new_row = {
                'category_name': category_name,
                'dict_context': column.get('pi_dict_context', None),
                'dict_name': column.get('pi_dict_name', None),
                'dict_context_results': column.get('pi_dict_context_results',
                                                   None),
                'dict_name_results': column.get('pi_dict_name_results', None),
                'disp_order': column['pi_disp_order'],
                'display': column['pi_display'],
                'display_results': column['pi_display_results'],
                'referred_slug': column.get('pi_referred_slug', None),
                'form_type': column.get('pi_form_type', None),
                'intro': column.get('pi_intro', None),
                'label': column.get('pi_label', None),
                'label_results': column.get('pi_label_results', None),
                'name': column.get('field_name', None),
                'slug': column.get('pi_slug', None),
                'old_slug': column.get('pi_old_slug', None),
                'sub_heading': column.get('pi_sub_heading', None),
                'tooltip': column.get('pi_tooltip', None),
                'ranges': ranges,
                'field_hints1': column.get('pi_field_hints1', None),
                'field_hints2': column.get('pi_field_hints2', None),
            }
            rows.append(new_row)
    db.insert_rows('import', 'param_info', rows)

    return True
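
The pi_ranges handling above implies that param_info_ranges.json maps a key named in a table schema to a structure that is re-serialized into the ranges column. A sketch of a plausible shape (key and values invented for illustration):

import json

# Hypothetical param_info_ranges.json contents:
ranges_json = {
    'wavelength_ranges': {
        'UV':  {'min': 0.01, 'max': 0.4},
        'VIS': {'min': 0.4,  'max': 0.7},
    }
}
# A column with 'pi_ranges': 'wavelength_ranges' would store:
ranges = json.dumps(ranges_json['wavelength_ranges'])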
Example 14
def create_import_param_info_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import param_info table')
    pi_schema = import_util.read_schema_for_table('param_info')
    # Start from scratch
    db.drop_table('import', 'param_info')
    db.create_table('import', 'param_info', pi_schema, ignore_if_exists=False)

    # We use the permanent tables to determine what goes into param_info
    table_names = db.table_names('perm', prefix='obs_')

    rows = []
    for table_name in table_names:
        table_schema = import_util.read_schema_for_table(table_name)
        if table_schema is None:
            logger.log('error',
                       f'Unable to read table schema for "{table_name}"')
            return False
        for column in table_schema:
            category_name = column.get('pi_category_name', None)
            if category_name is None:
                continue
            # Log an error if value in pi_units is not in unit translation table
            unit = column.get('pi_units', None)
            field_name = column.get('field_name', None)
            if unit and unit not in opus_support.UNIT_TRANSLATION:
                logger.log(
                    'error',
                    f'"{unit}" in "{category_name}/{field_name}" is not ' +
                    'a valid unit in translation table')
            form_type = column.get('pi_form_type', None)
            if (unit and (not form_type or
                          (not form_type.startswith('RANGE%')
                           and not form_type.startswith('LONG%')))):
                logger.log(
                    'warning',
                    f'"{category_name}/{field_name}" has units but ' +
                    'not form_type RANGE%')
            if form_type == 'RANGE':
                logger.log(
                    'warning',
                    f'"{category_name}/{field_name}" has RANGE type ' +
                    'without numerical format')

            new_row = {
                'category_name': category_name,
                'dict_context': column['pi_dict_context'],
                'dict_name': column['pi_dict_name'],
                'disp_order': column['pi_disp_order'],
                'display': column['pi_display'],
                'display_results': column['pi_display_results'],
                'form_type': column['pi_form_type'],
                'intro': column['pi_intro'],
                'label': column['pi_label'],
                'label_results': column['pi_label_results'],
                'name': column['field_name'],
                'slug': column['pi_slug'],
                'old_slug': column.get('pi_old_slug', None),
                'sub_heading': column['pi_sub_heading'],
                'tooltip': column['pi_tooltip'],
                'units': column['pi_units']
            }
            rows.append(new_row)
    db.insert_rows('import', 'param_info', rows)

    return True
Example 15
def create_import_definitions_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import definitions table')
    def_schema = import_util.read_schema_for_table('definitions')
    # Start from scratch
    db.create_table('import', 'definitions', def_schema, ignore_if_exists=False)

    bad_db = False

    pds_file = opus_secrets.DICTIONARY_PDSDD_FILE
    json_schema_path = opus_secrets.DICTIONARY_JSON_SCHEMA_PATH + '/obs*.json'
    json_list = glob.glob(json_schema_path)
    json_def_path = (opus_secrets.DICTIONARY_JSON_SCHEMA_PATH +
                     '/internal_def*.json')
    json_list += glob.glob(json_def_path)
    rows = []

    logger.log('info', f'Importing {pds_file}')

    context = 'PSDD'
    try:
        label = pdsparser.PdsLabel.from_file(pds_file)
    except IOError as e:
        logger.log('error', f'Failed to read {pds_file}: {e.strerror}')
        bad_db = True
    else:
        for item in range(len(label)):
            term = str(label[item]['NAME']).rstrip('\r\n')
            try:
                definition = ' '.join(str(label[item]['DESCRIPTION']).split())
            except KeyError:
                logger.log('warning',
                           f'No description for item {item}: "{term}"')
                continue
            new_row = {
                'term': term,
                'context': context,
                'definition': definition
            }
            rows.append(new_row)

    for file_path in json_list:
        file_name = os.path.basename(file_path)
        file_name = os.path.splitext(file_name)[0]
        logger.log('info', f'Importing {file_name}')
        schema = import_util.read_schema_for_table(file_name)
        for column in schema:
            for suffix in ('', '_results'):
                if 'definition'+suffix in column:
                    definition = column['definition'+suffix]
                    if column.get('pi_dict_name'+suffix, None) is None:
                        logger.log('error',
                           f'Missing term for "{definition}" in "{file_name}"')
                        bad_db = True
                        continue
                    term = column['pi_dict_name'+suffix]
                    if column.get('pi_dict_context'+suffix, None) is None:
                        logger.log('error',
                         f'Missing context for "{definition}" in "{file_name}"')
                        bad_db = True
                        continue
                    context = column['pi_dict_context'+suffix]

                    new_row = {
                        'term': term,
                        'context': context,
                        'definition': definition
                    }
                    rows.append(new_row)

    if bad_db:
        return False

    db.insert_rows('import', 'definitions', rows)

    return True
Example 16
def create_import_param_info_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import param_info table')
    pi_schema = import_util.read_schema_for_table('param_info')
    # Start from scratch
    db.drop_table('import', 'param_info')
    db.create_table('import', 'param_info', pi_schema, ignore_if_exists=False)

    # We use the permanent tables to determine what goes into param_info
    table_names = db.table_names('perm', prefix='obs_')

    # read json file for ranges info
    ranges_filename = os.path.join('table_schemas', 'param_info_ranges.json')
    with open(ranges_filename, 'r') as fp:
        try:
            # read contents (str) and convert it to a json object (dict)
            contents = fp.read()
            ranges_json = json.loads(contents)
        except json.decoder.JSONDecodeError:
            logger.log('debug',
                       f'Was reading ranges json file "{ranges_filename}"')
            raise

    rows = []
    for table_name in table_names:
        table_schema = import_util.read_schema_for_table(table_name)
        if table_schema is None:
            logger.log('error',
                       f'Unable to read table schema for "{table_name}"')
            return False
        for column in table_schema:
            category_name = column.get('pi_category_name', None)
            if category_name is None:
                continue
            # Log an error if value in pi_units is not in unit translation table
            unit = column.get('pi_units', None)
            field_name = column.get('field_name', None)
            if unit and unit not in opus_support.UNIT_CONVERSION:
                logger.log(
                    'error',
                    f'"{unit}" in "{category_name}/{field_name}" is not ' +
                    'a valid unit')
                return False
            form_type = column.get('pi_form_type', None)
            if (unit and (not form_type or
                          (not form_type.startswith('RANGE%')
                           and not form_type.startswith('LONG%')))):
                logger.log(
                    'warning',
                    f'"{category_name}/{field_name}" has units but ' +
                    'not form_type RANGE%')
            if form_type == 'RANGE':
                logger.log(
                    'warning',
                    f'"{category_name}/{field_name}" has RANGE type ' +
                    'without numerical format')

            # if pi_ranges exists in .json, get the corresponding ranges info
            # from dict and convert it to str before storing to database
            ranges = column.get('pi_ranges', None)
            if ranges:
                if ranges in ranges_json:
                    ranges = ranges_json[ranges]
                    ranges = json.dumps(ranges)
                else:
                    logger.log(
                        'error',
                        f'pi_ranges: "{ranges}" is not in "{ranges_filename}"')
                    return False

            new_row = {
                'category_name': category_name,
                'dict_context': column.get('pi_dict_context', None),
                'dict_name': column.get('pi_dict_name', None),
                'dict_context_results': column.get('pi_dict_context_results',
                                                   None),
                'dict_name_results': column.get('pi_dict_name_results', None),
                'disp_order': column['pi_disp_order'],
                'display': column['pi_display'],
                'display_results': column['pi_display_results'],
                'form_type': column['pi_form_type'],
                'intro': column['pi_intro'],
                'label': column['pi_label'],
                'label_results': column['pi_label_results'],
                'name': column['field_name'],
                'slug': column['pi_slug'],
                'old_slug': column.get('pi_old_slug', None),
                'sub_heading': column['pi_sub_heading'],
                'tooltip': column['pi_tooltip'],
                'units': column['pi_units'],
                'ranges': ranges,
                'field_hints1': column.get('pi_field_hints1', None),
                'field_hints2': column.get('pi_field_hints2', None),
            }
            rows.append(new_row)
    db.insert_rows('import', 'param_info', rows)

    return True
Example 17
def create_import_partables_table():
    db = impglobals.DATABASE
    logger = impglobals.LOGGER

    logger.log('info', 'Creating new import partables table')
    partables_schema = import_util.read_schema_for_table('partables')
    # Start from scratch
    db.drop_table('import', 'partables')
    db.create_table('import',
                    'partables',
                    partables_schema,
                    ignore_if_exists=False)

    # We use the entries in data_config to determine the first part of what
    # goes into partables.

    # For obs_general, we care about mission_id, inst_host_id, instrument_id,
    # and type_id.

    rows = []

    for mission_abbrev in sorted(MISSION_ABBREV_TO_MISSION_TABLE_SFX.keys()):
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'mission_id',
            'trigger_val': mission_abbrev,
            'partable': ('obs_mission_' +
                         MISSION_ABBREV_TO_MISSION_TABLE_SFX[mission_abbrev])
        }
        rows.append(entry)

    for instrument_id in sorted(INSTRUMENT_ABBREV_TO_MISSION_ABBREV.keys()):
        partable = 'obs_instrument_' + instrument_id.lower()
        if instrument_id[:3] == 'HST':
            # This is a hack because we don't actually have HST instrument
            # tables, but instead put everything in the mission tables
            partable = 'obs_mission_hubble'
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'instrument_id',
            'trigger_val': instrument_id,
            'partable': partable
        }
        rows.append(entry)

    for inst_host_id in sorted(INST_HOST_ABBREV_TO_MISSION_ABBREV.keys()):
        entry = {
            'trigger_tab': 'obs_general',
            'trigger_col': 'inst_host_id',
            'trigger_val': inst_host_id,
            'partable': ('obs_mission_' + MISSION_ABBREV_TO_MISSION_TABLE_SFX[
                INST_HOST_ABBREV_TO_MISSION_ABBREV[inst_host_id]])
        }
        rows.append(entry)

    # We don't need this anymore because Image Constraints are now permanently
    # displayed
    # entry = {
    #     'trigger_tab': 'obs_general',
    #     'trigger_col': 'data_type',
    #     'trigger_val': 'IMG',
    #     'partable':    'obs_type_image'
    # }
    # rows.append(entry)

    surface_geo_table_names = impglobals.DATABASE.table_names(
        'perm', prefix='obs_surface_geometry__')
    for table_name in sorted(surface_geo_table_names):
        target_name = table_name.replace('obs_surface_geometry__', '')
        entry = {
            'trigger_tab': 'obs_surface_geometry_name',
            'trigger_col': 'target_name',
            'trigger_val': import_util.decode_target_name(target_name).upper(),
            'partable': table_name
        }
        rows.append(entry)

    db.insert_rows('import', 'partables', rows)