def test(self):
            # Set up a test cache
            storage = AttributeCache(cache_directory=self._temp_dir)
            SimulationState().set_current_time(2000)

            table_name = 'foo'

            values = {
                'attribute1': array([1, 2, 3], dtype=int32),
                'attribute2': array([4, 5, 6], dtype=int32),
            }

            storage.write_table(table_name, values)

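            # The attribute cache lays data out as <cache>/<year>/<table>/ with
            # one binary file per attribute; the file suffix encodes the
            # platform's endianness and the numpy dtype, which 'replacements'
            # supplies for the expected-names check below.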
            table_dir = os.path.join(self._temp_dir, '2000', table_name)
            self.assertTrue(os.path.exists(table_dir))

            actual = set(os.listdir(table_dir))
            expected = set([
                'attribute1.%(endian)si4' % replacements,
                'attribute2.%(endian)si4' % replacements
            ])
            self.assertEqual(expected, actual)

            exporter = ExportCacheToDbfTableCommand(
                cache_directory=self._temp_dir,
                year='2000',
                table_name=table_name,
                dbf_directory=self._temp_dir,
                decimalcount=4,
            )
            exporter.execute()

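            # Verify the export by reopening the .dbf with dbfpy and comparing
            # each record against the source arrays; float ('F') fields are
            # checked to 4 places to match the decimalcount used above.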
            out_storage = dbf_storage(self._temp_dir)

            db = _dbf_class(out_storage._get_file_path_for_table(table_name))
            length = max(len(column) for column in values.values())
            i = 0
            field_type = {}
            for name, ftype in [
                    field.fieldInfo()[:2] for field in db.header.fields
            ]:
                field_type[name] = ftype
            for rec in db:
                for key in values.keys():
                    if field_type[key.upper()] == 'F':
                        self.assertAlmostEqual(values[key][i], rec[key], 4)
                    else:
                        self.assertEqual(values[key][i], rec[key])
                i = i + 1
            self.assertEqual(
                length,
                i,
                msg="The dbf file does not contain the expected number of records")
            db.close()
    def execute(self):
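        # Import a DBF table into the attribute cache: point the simulation
        # clock at the target year so the cache writes into that year's
        # directory, and restore the previous year afterwards.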
        in_storage = dbf_storage(storage_location=self.dbf_directory)

        out_storage = AttributeCache(cache_directory=self.cache_directory)

        old_time = SimulationState().get_current_time()
        SimulationState().set_current_time(self.year)

        self._get_exporter().export_dataset(
            dataset_name=self.table_name,
            in_storage=in_storage,
            out_storage=out_storage,
        )

        SimulationState().set_current_time(old_time)
    def execute(self):
        in_storage = AttributeCache(cache_directory=self.cache_directory)
        out_storage = dbf_storage(storage_location=self.dbf_directory,
                                  digits_to_right_of_decimal=self.decimalcount)

        old_time = SimulationState().get_current_time()
        SimulationState().set_current_time(self.year)

        self._get_exporter().export_dataset(
            dataset_name=self.table_name,
            in_storage=in_storage,
            out_storage=out_storage,
        )

        SimulationState().set_current_time(old_time)
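        # DBF field names are limited to ten characters, so long attribute
        # names get shortened on export; returning the (private) mapping of
        # short names presumably lets callers recover the correspondence.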
        return out_storage._short_names
    def _set_exporter(self, exporter):
        """hook for unit tests"""
        self._exporter = exporter
        
    def _get_exporter(self):
        """hook for unit tests"""
        if self._exporter is None:
            # Create default exporter object.
            self._exporter = ExportStorage()
        return self._exporter
    
    
from opus_core.tests import opus_unittest

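# Define the functional tests only when DBF support is available: constructing
# a dbf_storage raises if the dbfpy backend is missing, in which case the
# else-branch (and the test case in it) is skipped.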
try:
    dbf_storage(storage_location='')
except Exception:
    pass
else:
    import os, sys
    from shutil import rmtree
    from tempfile import mkdtemp
    from numpy import array, int32
    from dbfpy.dbf import Dbf as _dbf_class
    from opus_core.tests.utils.cache_extension_replacements import replacements
    
    class FunctionalTests(opus_unittest.OpusTestCase):
        def setUp(self):
            self._temp_dir = mkdtemp(prefix='opus_tmp_export_cache_to_dbf_table_command')
            
        def tearDown(self):
            # Remove the temporary cache directory created in setUp.
            rmtree(self._temp_dir)
    (options, args) = parser.parse_args()

    dbf_directory = options.dbf_directory
    attribute_cache_directory = options.attribute_cache_directory
    table_name = options.table_name
    cache_year = options.cache_year

    if (dbf_directory is None or attribute_cache_directory is None
            or table_name is None or cache_year is None):
        parser.print_help()
        sys.exit(1)

    input_storage = dbf_storage(storage_location=dbf_directory)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    logger.start_block("Exporting table '%s' to year %s of cache located at %s..." %
                   (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
            out_storage=output_storage,
        )
    finally:
        # Close the logger block opened above.
        logger.end_block()
        "The attribute cache year into which to write the output (required).")

    (options, args) = parser.parse_args()

    dbf_directory = options.dbf_directory
    attribute_cache_directory = options.attribute_cache_directory
    table_name = options.table_name
    cache_year = options.cache_year

    if (dbf_directory is None or attribute_cache_directory is None
            or table_name is None or cache_year is None):

        parser.print_help()
        sys.exit(1)

    input_storage = dbf_storage(storage_location=dbf_directory)

    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    logger.start_block(
        "Exporting table '%s' to year %s of cache located at %s..." %
        (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
    parser = OptionParser()

    parser.add_option('-c', '--cache_path', dest='cache_path', type='string',
        help='The filesystem path to the cache to export (required)')
    parser.add_option('-o', '--output_directory', dest='output_directory',
        type='string', help='The filesystem path of the database to which '
            'output will be written (required)')
    parser.add_option('-t', '--table_name', dest='table_name', type='string',
        help='Name of the table to export (optional). Use it when only one '
            'table should be exported.')

    (options, args) = parser.parse_args()

    cache_path = options.cache_path
    output_directory = options.output_directory
    table_name = options.table_name

    if None in (cache_path, output_directory):
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    in_storage = flt_storage(storage_location=cache_path)
    out_storage = dbf_storage(storage_location=output_directory)

    if table_name is not None:
        ExportStorage().export_dataset(
            table_name, in_storage=in_storage, out_storage=out_storage)
    else:
        ExportStorage().export(in_storage=in_storage, out_storage=out_storage)
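
# Example invocation (script and path names are illustrative):
#   python export_cache_to_dbf.py -c /urbansim_cache/2000 -o /tmp/dbf_out -t households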
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2012 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE

from opus_core.store.csv_storage import csv_storage
from opus_core.store.dbf_storage import dbf_storage
import os, sys

if len(sys.argv) not in (2, 3):
    print("Usage: %s csv_file [dbf_file]" % sys.argv[0])
    sys.exit(1)

csv_file = sys.argv[1]
csv_file = os.path.normpath(csv_file)
csv_path, csv_name = os.path.split(csv_file)
csv_table, csv_ext = os.path.splitext(csv_name)

if len(sys.argv) == 2:
    dbf_path, dbf_table = csv_path, csv_table
elif len(sys.argv) == 3:
    dbf_file = sys.argv[2]
    dbf_file = os.path.normpath(dbf_file)
    dbf_path, dbf_name = os.path.split(dbf_file)
    dbf_table, dbf_ext = os.path.splitext(dbf_name)

csv_store = csv_storage(storage_location=csv_path)
dbf_store = dbf_storage(storage_location=dbf_path)
data = csv_store.load_table(csv_table)
dbf_store.write_table(dbf_table, data)
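
# Example invocation (file names are illustrative):
#   python csv_to_dbf.py data/parcels.csv data/parcels.dbf
# With only the csv argument, the .dbf is written alongside the .csv using the
# same base name.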