Example #1
0
def scene_dir_setup():
    """Set up a chirps scene directory for integration testing.

    Yields (target_product_fp, link_target); after the test resumes it,
    removes any product the test created, plus the fake asset and any
    directories this setup itself made.
    """
    asset_bn = 'global-daily-chirps-v2.0.1997.07.01.tif.gz'
    product_bn = 'global_1997182_chirps_precip.tif'
    tile_fp = os.path.join(chirps.chirpsRepository.path('tiles'), 'global')
    scene_fp = os.path.join(tile_fp, '1997182')

    target_asset_fp = os.path.join(scene_fp, asset_bn)
    target_product_fp = os.path.join(scene_fp, product_bn)
    link_target = '/vsigzip/' + target_asset_fp

    # race condition here, but for integration tests, probably close enough
    if os.path.lexists(target_product_fp):
        raise IOError('file obstructing test, aborting: ' + target_product_fp)

    orm.setup()
    # setup by moving the asset into the archive, but only if needed
    use_fake_asset = not os.path.exists(target_asset_fp)
    made_dirs = []  # always bound, so cleanup can't hit a NameError
    if use_fake_asset:
        source_asset_fp = os.path.join(os.path.dirname(__file__), 'data',
                                       asset_bn)
        # parenthesize the tuple: the bare form is a syntax error in python 3
        made_dirs = [d for d in (tile_fp, scene_fp) if not os.path.exists(d)]
        # plain loop, not a comprehension: this is done for side effects only
        for d in made_dirs:
            os.mkdir(d)
        shutil.copy(source_asset_fp, target_asset_fp)
        dbinv.rectify_assets(chirps.chirpsAsset)

    yield target_product_fp, link_target

    # cleanup (using a fake DB means don't have to rectify post-delete)
    if os.path.lexists(target_product_fp):
        os.remove(target_product_fp)
    if use_fake_asset:
        os.remove(target_asset_fp)
        # remove innermost directory first so each rmdir finds it empty
        for d in reversed(made_dirs):
            os.rmdir(d)
Example #2
0
def migrate_database():
    """Migrate the database if the ORM is turned on."""
    if not orm.use_orm():
        return  # ORM disabled; nothing to migrate
    # call form of print works identically on python 2 and 3
    print('Migrating database')
    orm.setup()
    # non-interactive so this can run unattended (scripts, CI)
    call_command('migrate', interactive=False)
Example #3
0
def t_setup(mocker, setup_complete, use_orm, expected):
    """Test orm.setup() to confirm it only runs setup at the right times."""
    mocker.patch('gips.inventory.orm.setup_complete', setup_complete)
    m_use_orm = mocker.patch('gips.inventory.orm.use_orm')
    m_use_orm.return_value = use_orm
    m_django_setup = mocker.patch('gips.inventory.orm.django.setup')
    orm.setup()
    # two separate asserts so a failure pinpoints which expectation broke
    # (the original compound `A and B` assert obscured that)
    assert expected == m_django_setup.called
    # the global flag must be set in any case, whether django.setup ran or not
    assert orm.setup_complete
Example #4
0
def gips_script_setup(driver_string=None, stop_on_error=False, setup_orm=True):
    """Run this at the beginning of a GIPS CLI program to do setup."""
    global _stop_on_error
    _stop_on_error = stop_on_error
    set_error_handler(cli_error_handler)
    from gips.inventory import orm  # avoids a circular import
    with error_handler():
        # import the driver's data class (if one was requested) before
        # touching the ORM; orm.setup must come after this
        if driver_string is None:
            data_class = None
        else:
            data_class = import_data_class(driver_string)
        if setup_orm:
            orm.setup()
        return data_class
Example #5
0
def t_inventory_setup(override_settings, mocker):
    """Confirm orm.setup() performs django setup exactly once, even when re-called."""
    override_settings().GIPS_ORM = True
    mocked_django_setup = mocker.patch.object(gips.inventory.orm.django, 'setup')

    # reset the guard flag in case an earlier test already tripped it
    gips.inventory.orm.setup_complete = False

    setup()  # orm.setup()
    assert gips.inventory.orm.setup_complete
    mocked_django_setup.assert_called_once_with()

    # second call should be a no-op: flag stays set, django.setup not re-run
    setup()
    assert gips.inventory.orm.setup_complete
    mocked_django_setup.assert_called_once_with()
Example #6
0
def main():
    """CLI entry point for the GIPS data archive utility."""
    title = Colors.BOLD + 'GIPS Data Archive Utility (v%s)' % gipsversion + Colors.OFF

    # argument parsing
    parser = GIPSParser(description=title)
    group = parser.add_argument_group('archive options')
    group.add_argument('--keep',
                       help='Keep files after adding to archive',
                       default=False,
                       action='store_true')
    group.add_argument('--recursive',
                       help='Iterate through subdirectories',
                       default=False,
                       action='store_true')
    group.add_argument(
        '--update',
        # closing parenthesis was missing from this help text
        help='Update asset if newer version available, (must call'
             ' gips_process to regenerate products)',
        default=False,
        action='store_true')
    group.add_argument(
        '--path',
        default='.',
        help='Path to search for files to archive, defaults to `.`')
    args = parser.parse_args()

    utils.gips_script_setup(None, args.stop_on_error)

    with utils.error_handler('Data archive error'):
        # call form of print works identically on python 2 and 3
        print(title)
        cls = import_data_class(args.command)
        orm.setup()  # set up DB orm in case it's needed for Asset.archive()
        archived_assets = cls.archive_assets(args.path, args.recursive,
                                             args.keep, args.update)

        # if DB inventory is enabled, update it to contain the newly archived assets
        if orm.use_orm():
            for a in archived_assets:
                dbinv.update_or_add_asset(asset=a.asset,
                                          sensor=a.sensor,
                                          tile=a.tile,
                                          date=a.date,
                                          name=a.archived_filename,
                                          driver=cls.name.lower())

    utils.gips_exit()
Example #7
0
def t_api_inventory(driver, spatial, product, datespec, expected):
    """
    Test gips.data.core.Data.inventory for different API parameter
    combinations.
    """
    driver_setup.setup_repo_data(driver)
    from gips.data.modis import modisData, modisAsset
    from gips.inventory import orm
    import gips.inventory.dbinv.api as api

    orm.setup()
    api.rectify_assets(modisAsset)
    # a '.tif' spatial spec is a raster mask; anything else is a site vector
    if spatial.endswith('.tif'):
        site_arg, raster_arg = None, spatial
    else:
        site_arg, raster_arg = spatial, None
    inv = modisData.inventory(site=site_arg, rastermask=raster_arg,
                              dates=datespec, products=[product])
    assert len(inv.dates) == expected