def handle(self, *args, **options):
    """Run the parliament data importers selected via command options.

    Term data is always imported; party, member, seat, minutes, document,
    vote and funding imports each run only when the matching option is set.
    The ``update`` option puts every importer into replace mode.
    """
    fetcher = HttpFetcher()
    if options['cache']:
        fetcher.set_cache_dir(options['cache'])

    # The minutes importer is created up front: terms are always imported,
    # and the same instance is reused for the optional minutes import.
    minutes_importer = MinutesImporter(http_fetcher=fetcher)
    minutes_importer.replace = options['update']
    minutes_importer.import_terms()

    if options['party']:
        party_importer = PartyImporter(http_fetcher=fetcher)
        party_importer.replace = options['update']
        party_importer.import_parties()
        party_importer.import_governments()
        party_importer.import_governingparties()

    if options['member']:
        member_importer = MemberImporter(http_fetcher=fetcher)
        member_importer.replace = options['update']
        member_importer.import_districts()
        member_args = {}
        if options['single']:
            member_args['single'] = options['single']
        member_importer.import_members(member_args)

    if options['seat']:
        seat_importer = SeatImporter(http_fetcher=fetcher)
        seat_importer.replace = options['update']
        seat_importer.import_seats()

    if options['minutes']:
        minutes_args = {'massive': options['massive']}
        if options['single']:
            minutes_args['single'] = options['single']
        if options['from_year']:
            minutes_args['from_year'] = options['from_year']
        if options['from_id']:
            minutes_args['from_id'] = options['from_id']
        minutes_importer.import_minutes(minutes_args)

    if options['docs']:
        doc_importer = DocImporter(http_fetcher=fetcher)
        doc_importer.replace = options['update']
        doc_args = {'massive': options['massive']}
        if options['single']:
            doc_args['single'] = options['single']
        if options['from_year']:
            doc_args['from_year'] = options['from_year']
        # Refresh re-processes already-imported documents instead of
        # fetching new ones.
        if options['refresh']:
            doc_importer.refresh_docs(**doc_args)
        else:
            doc_importer.import_docs(**doc_args)

    if options['vote']:
        vote_importer = VoteImporter(http_fetcher=fetcher)
        vote_importer.replace = options['update']
        vote_importer.import_votes()

    if options['funding']:
        funding_importer = FundingImporter(http_fetcher=fetcher)
        funding_importer.replace = options['update']
        funding_importer.import_funding()
def handle(self, **options):
    """Import municipalities and points of interest (POIs).

    Configures an HTTP fetcher with an on-disk cache directory and
    installs a requests_cache backend before running the importers.
    """
    http = HttpFetcher()
    # Cache downloads under the project root so re-runs avoid re-fetching.
    http.set_cache_dir(os.path.join(settings.PROJECT_ROOT, ".cache"))
    requests_cache.install_cache('geo_import_man')
    self.data_path = os.path.join(settings.PROJECT_ROOT, 'data')
    self.http = http
    # print() function calls (not Python 2 print statements) keep this
    # consistent with the other commands in this file and are
    # behavior-identical for a single argument under Python 2.
    print("Importing municipalities")
    self.import_municipalities()
    print("Importing POIs")
    self.import_pois_from_csv()
    self.import_pois_from_rest()
def handle(self, **options):
    """Import municipalities and points of interest (POIs).

    Configures an HTTP fetcher with an on-disk cache directory and
    installs a requests_cache backend before running the importers.
    """
    http = HttpFetcher()
    # Cache downloads under the project root so re-runs avoid re-fetching.
    http.set_cache_dir(os.path.join(settings.PROJECT_ROOT, ".cache"))
    requests_cache.install_cache('geo_import_man')
    self.data_path = os.path.join(settings.PROJECT_ROOT, 'data')
    self.http = http
    # print() function calls (not Python 2 print statements) keep this
    # consistent with the other commands in this file and are
    # behavior-identical for a single argument under Python 2.
    print("Importing municipalities")
    self.import_municipalities()
    print("Importing POIs")
    self.import_pois_from_csv()
    self.import_pois_from_rest()
def handle(self, **options):
    """Import municipal election data.

    Runs the full pipeline: municipalities and their boundaries, election
    statistics, trustees, voting districts with boundaries and stats, and
    finally per-candidate statistics.
    """
    http = HttpFetcher()
    # Cache downloads under the project root so re-runs avoid re-fetching.
    http.set_cache_dir(os.path.join(settings.PROJECT_ROOT, ".cache"))
    self.data_path = os.path.join(settings.PROJECT_ROOT, "..", "data")
    self.http = http
    # print() function calls (not Python 2 print statements) keep this
    # consistent with the other commands in this file and are
    # behavior-identical for a single argument under Python 2.
    print("Importing municipalities")
    self.import_municipalities()
    print("Importing municipality boundaries")
    self.import_municipality_boundaries()
    print("Importing election stats")
    self.import_election_stats()
    print("Importing trustees")
    self.import_trustees()
    print("Importing voting districts")
    self.import_voting_districts()
    print("Importing voting district boundaries")
    self.import_voting_district_boundaries()
    print("Importing voting district stats")
    self.import_voting_district_stats()
    print("Importing candidate stats")
    self.import_candidate_stats()
def handle(self, **options):
    """Import municipal election data.

    Runs the full pipeline: municipalities and their boundaries, election
    statistics, trustees, voting districts with boundaries and stats, and
    finally per-candidate statistics.
    """
    http = HttpFetcher()
    # Cache downloads under the project root so re-runs avoid re-fetching.
    http.set_cache_dir(os.path.join(settings.PROJECT_ROOT, ".cache"))
    self.data_path = os.path.join(settings.PROJECT_ROOT, '..', 'data')
    self.http = http
    # print() function calls (not Python 2 print statements) keep this
    # consistent with the other commands in this file and are
    # behavior-identical for a single argument under Python 2.
    print("Importing municipalities")
    self.import_municipalities()
    print("Importing municipality boundaries")
    self.import_municipality_boundaries()
    print("Importing election stats")
    self.import_election_stats()
    print("Importing trustees")
    self.import_trustees()
    print("Importing voting districts")
    self.import_voting_districts()
    print("Importing voting district boundaries")
    self.import_voting_district_boundaries()
    print("Importing voting district stats")
    self.import_voting_district_stats()
    print("Importing candidate stats")
    self.import_candidate_stats()
def handle(self, *args, **options):
    """Run the selected parliament importers; optionally recompute
    keyword activity scores.

    Terms are always imported. Each other import runs only when its
    option is set; ``replace`` switches every importer into replace mode.
    """
    verbosity = int(options['verbosity'])

    fetcher = HttpFetcher()
    if options['cache']:
        fetcher.set_cache_dir(options['cache'])

    # Created unconditionally: terms are always imported, and the same
    # instance handles the optional minutes import below.
    minutes_importer = MinutesImporter(http_fetcher=fetcher)
    minutes_importer.replace = options['replace']
    minutes_importer.import_terms()

    if options['party']:
        party_importer = PartyImporter(http_fetcher=fetcher)
        party_importer.replace = options['replace']
        party_importer.import_parties()
        party_importer.import_governments()
        party_importer.import_governingparties()

    if options['member']:
        member_importer = MemberImporter(http_fetcher=fetcher)
        member_importer.replace = options['replace']
        member_importer.import_districts()
        member_args = {
            'full': options['full'],
            'dry_run': options['dry_run'],
        }
        if options['single']:
            member_args['single'] = options['single']
        member_importer.import_members(**member_args)

    if options['seat']:
        seat_importer = SeatImporter(http_fetcher=fetcher)
        seat_importer.replace = options['replace']
        seat_importer.import_seats()

    if options['minutes']:
        minutes_args = {'full': options['full']}
        if options['single']:
            minutes_args['single'] = options['single']
        if options['from_year']:
            minutes_args['from_year'] = options['from_year']
        if options['from_id']:
            minutes_args['from_id'] = options['from_id']
        minutes_importer.import_minutes(minutes_args)

    if options['docs']:
        doc_importer = DocImporter(http_fetcher=fetcher)
        doc_importer.replace = options['replace']
        doc_args = {'full': options['full']}
        if options['single']:
            doc_args['single'] = options['single']
        if options['from_year']:
            doc_args['from_year'] = options['from_year']
        if options['from_id']:
            doc_args['from_id'] = options['from_id']
        doc_importer.import_docs(**doc_args)

    if options['vote']:
        vote_importer = VoteImporter(http_fetcher=fetcher)
        vote_importer.replace = options['replace']
        vote_args = {'full': options['full']}
        if options['single']:
            vote_args['single'] = options['single']
        if options['from_year']:
            vote_args['from_year'] = options['from_year']
        vote_importer.import_votes(**vote_args)

    if options['funding']:
        funding_importer = FundingImporter(http_fetcher=fetcher)
        funding_importer.replace = options['replace']
        funding_importer.import_funding()

    if options['keyword_activity']:
        latest_term = Term.objects.latest()
        for index, keyword in enumerate(Keyword.objects.all()):
            # Coarse progress indicator at verbosity 2.
            if verbosity == 2 and index % 100 == 0:
                print(index)
            # Score the most recent term first, then all time.
            recent = keyword.store_activity_score(term=latest_term)
            all_time = keyword.store_activity_score()
            if verbosity >= 3:
                print("%s: all time %d, recent term %d" % (
                    keyword, all_time.score, recent.score))
def handle(self, *args, **options):
    """Run the selected parliament importers; optionally recompute
    keyword activity scores.

    Terms are always imported. Each other import runs only when its
    option is set; ``replace`` switches every importer into replace mode.
    Supports narrowing the document import by ``doc_type``.
    """
    verbosity = int(options['verbosity'])

    fetcher = HttpFetcher()
    if options['cache']:
        fetcher.set_cache_dir(options['cache'])

    # Created unconditionally: terms are always imported, and the same
    # instance handles the optional minutes import below.
    minutes_importer = MinutesImporter(http_fetcher=fetcher)
    minutes_importer.replace = options['replace']
    minutes_importer.import_terms()

    if options['party']:
        party_importer = PartyImporter(http_fetcher=fetcher)
        party_importer.replace = options['replace']
        party_importer.import_parties()
        party_importer.import_governments()
        party_importer.import_governingparties()

    if options['member']:
        member_importer = MemberImporter(http_fetcher=fetcher)
        member_importer.replace = options['replace']
        member_importer.import_districts()
        member_args = {
            'full': options['full'],
            'dry_run': options['dry_run'],
        }
        if options['single']:
            member_args['single'] = options['single']
        member_importer.import_members(**member_args)

    if options['seat']:
        seat_importer = SeatImporter(http_fetcher=fetcher)
        seat_importer.replace = options['replace']
        seat_importer.import_seats()

    if options['minutes']:
        minutes_args = {'full': options['full']}
        if options['single']:
            minutes_args['single'] = options['single']
        if options['from_year']:
            minutes_args['from_year'] = options['from_year']
        if options['from_id']:
            minutes_args['from_id'] = options['from_id']
        minutes_importer.import_minutes(minutes_args)

    if options['docs']:
        doc_importer = DocImporter(http_fetcher=fetcher)
        doc_importer.replace = options['replace']
        doc_args = {'full': options['full']}
        if options['single']:
            doc_args['single'] = options['single']
        if options['from_year']:
            doc_args['from_year'] = options['from_year']
        if options['from_id']:
            doc_args['from_id'] = options['from_id']
        if options['doc_type']:
            doc_args['doc_type'] = options['doc_type']
        doc_importer.import_docs(**doc_args)

    if options['vote']:
        vote_importer = VoteImporter(http_fetcher=fetcher)
        vote_importer.replace = options['replace']
        vote_args = {'full': options['full']}
        if options['single']:
            vote_args['single'] = options['single']
        if options['from_year']:
            vote_args['from_year'] = options['from_year']
        vote_importer.import_votes(**vote_args)

    if options['funding']:
        funding_importer = FundingImporter(http_fetcher=fetcher)
        funding_importer.replace = options['replace']
        funding_importer.import_funding()

    if options['keyword_activity']:
        latest_term = Term.objects.latest()
        for index, keyword in enumerate(Keyword.objects.all()):
            # Coarse progress indicator at verbosity 2.
            if verbosity == 2 and index % 100 == 0:
                print(index)
            # Score the most recent term first, then all time.
            recent = keyword.store_activity_score(term=latest_term)
            all_time = keyword.store_activity_score()
            if verbosity >= 3:
                print("%s: all time %d, recent term %d" % (
                    keyword, all_time.score, recent.score))