def setUp(self):
    """Load the standard CSV test fixtures into a fresh bulkup database."""
    self.db = bulkup.Database(connection)
    im = importer.Importer()
    im.import_partner_sites(self.db)
    # Pile groups and piles must exist before taxa can reference them,
    # so run these imports in dependency order.
    for import_method, csv_name in (
            (im.import_pile_groups, 'pile_group_info.csv'),
            (im.import_piles, 'pile_info.csv'),
            (im.import_taxa, 'taxa.csv'),
            ):
        import_method(self.db, importer.PlainFile('.', testdata(csv_name)))
    # NOTE: import_distributions takes only the data file, not the db.
    im.import_distributions(
        importer.PlainFile('.', testdata('dist_north_america.csv')))
def test_import_taxons(self):
    """Importing taxa.csv should create the expected number of Taxon rows."""
    im = importer.Importer()
    im.import_partner_sites(self.db)
    im.import_pile_groups(
        self.db, importer.PlainFile('.', testdata('pile_group_info.csv')))
    im.import_piles(
        self.db, importer.PlainFile('.', testdata('pile_info.csv')))
    # Wetland indicators must be loaded before taxa, which reference them.
    im.import_wetland_indicators(
        self.db, importer.PlainFile('.', testdata('wetland_indicators.csv')))
    im.import_taxa(self.db, importer.PlainFile('.', testdata('taxa.csv')))
    # Use count() so the database does the counting instead of fetching
    # every row, and assertEqual instead of the deprecated assertEquals.
    self.assertEqual(models.Taxon.objects.count(), 3522)
def test_import_characters(self):
    """The Character table should gain one row per real character in the CSV."""
    im = importer.Importer()
    im.import_characters(
        self.db, importer.PlainFile('.', testdata('characters.csv')))
    # Context manager guarantees the file is closed even if read() raises.
    with open(testdata('characters.csv')) as f:
        content = f.read()
    # Each '_min'/'_max' pair of CSV rows collapses into a single length
    # character, so subtract one per pair; the trailing -1 drops the
    # CSV header line.
    expected = (len(content.splitlines())
                - min(content.count('_min'), content.count('_max')) - 1)
    # count() avoids materializing every row; assertEqual replaces the
    # deprecated assertEquals alias.
    self.assertEqual(models.Character.objects.count(), expected)
def rebuild_default_filters(characters_csv):
    """Rebuild default filters for every pile, using CSV data where
    available or choosing 'best' characters otherwise.

    `characters_csv` may be either a path string or an already-opened
    importer file object; both forms are accepted so this can be run
    directly with "-m" or called from .importer during a full import.
    """
    from gobotany.core import importer  # here to avoid import loop
    log.info('Importing default filters from characters.csv')

    # Since we do not know whether we have been called directly with
    # "-m" or whether we have been called from .importer as part of a
    # big full import:
    if isinstance(characters_csv, basestring):
        characters_csv = importer.PlainFile('.', characters_csv)

    log.info(' Clearing the DefaultFilter table')
    models.DefaultFilter.objects.all().delete()

    # Read in the file and import its data.
    db = bulkup.Database(connection)
    pile_map = db.map('core_pile', 'name', 'id')
    character_map = db.map('core_character', 'short_name', 'id')
    piles_seen = set()
    table = db.table('core_defaultfilter')
    stuff = importer.read_default_filters(characters_csv)
    for key_name, pile_name, n, character_slug in stuff:
        # Membership test directly on the dict; no need for .keys().
        if pile_name in pile_map:
            piles_seen.add(pile_name)
            table.get(
                key=key_name,
                pile_id=pile_map[pile_name],
                order=n,
                character_id=character_map[character_slug],
                )
        else:
            # Lazy %-style logging args, consistent with the call below.
            log.error(' Pile %s not found', pile_name)

    # Make sure all piles were covered, and throw in two filters that
    # appear for all piles.
    for pile in models.Pile.objects.all():
        if pile.name not in piles_seen:
            log.error(' No default filters were given for pile %r',
                      pile.name)
        # These filters are added unconditionally for every pile.
        # Negative order values keep them sorted ahead of the CSV filters.
        table.get(key='simple', pile_id=pile.id, order=-2,
                  character_id=character_map['habitat_general'])
        table.get(key='simple', pile_id=pile.id, order=-1,
                  character_id=character_map['state_distribution'])
        table.get(key='full', pile_id=pile.id, order=-3,
                  character_id=character_map['habitat_general'])
        table.get(key='full', pile_id=pile.id, order=-2,
                  character_id=character_map['habitat'])
        table.get(key='full', pile_id=pile.id, order=-1,
                  character_id=character_map['state_distribution'])

    # And we are done.
    table.save()