def opusRun(progressCB, logCB, params):

    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    esri_data_path = params_dict["esri_data_path"]
    esri_table_name = params_dict["esri_table_name"]
    opus_data_directory = params_dict["opus_data_directory"]
    opus_data_year = params_dict["opus_data_year"]

    # Read from the ESRI source and write into the given year slice of the
    # OPUS attribute cache.
    input_storage = esri_storage(storage_location=esri_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)

    # Point the simulation state at the target year and start a fresh
    # SessionConfiguration backed by the attribute cache.
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache())

    if esri_table_name == "ALL":
        logCB("Sending all tables to OPUS storage...\n")
        lst = input_storage.get_table_names()
        for i in lst:
            ExportStorage().export_dataset(dataset_name=i, in_storage=input_storage, out_storage=output_storage)

    else:
        logCB("Exporting table '%s' to OPUS storage located at %s...\n" % (esri_table_name, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=esri_table_name, in_storage=input_storage, out_storage=output_storage
        )
        logCB("Finished exporting table '%s'\n" % (esri_table_name))
def opusRun(progressCB, logCB, params):
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)

    # Output esri data path
    esri_data_path = params_dict['esri_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    attribute_cache_years = [
        int(year) for year in os.listdir(opus_data_directory)
        if year.isdigit() and len(year) == 4
    ]

    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]

    for year in attribute_cache_years:

        input_storage = attribute_cache.get_flt_storage_for_year(year)

        # esri_is_avail is expected to be defined at module level (True only
        # when the ESRI storage backend could be imported).
        if esri_is_avail:
            output_storage = esri_storage(storage_location=esri_data_path)
        else:
            output_storage = None

        # Point the simulation state at the year being exported and create a
        # fresh SessionConfiguration backed by the attribute cache.
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())

        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()

        for i in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (i, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
            )
Example 5
        "The attribute cache year into which to write the output (required).")

    (options, args) = parser.parse_args()

    esri_path = options.esri_path
    attribute_cache_directory = options.attribute_cache_directory
    table_name = options.table_name
    cache_year = options.cache_year

    if (esri_path is None or attribute_cache_directory is None
            or table_name is None or cache_year is None):

        parser.print_help()
        sys.exit(1)

    input_storage = esri_storage(storage_location=esri_path)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    logger.start_block(
        "Exporting table '%s' to year %s of cache located at %s..." %
        (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
    finally:
        # close the logging block opened by logger.start_block() above
        logger.end_block()
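
# Note: every snippet above assumes the OPUS/UrbanSim runtime is importable.
# The names used (esri_storage, AttributeCache, SimulationState,
# SessionConfiguration, ExportStorage, logger) come from the opus_core
# package; exact import paths vary between OPUS releases, so check the
# installed version before copying these fragments into a standalone script.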