def index(request):
    """Homepage view: the overall top schools for the whole country (TZ).

    POST: expects a JSON body with a 'year' key; responds with JSON.
    GET: defaults to year 2017 and renders the homepage template.
    """
    # Getting the session
    session = get_session()
    schools = {}
    geo_level = "country"
    geo_code = "TZ"
    if request.method == 'POST':
        try:
            params = json.loads(request.body)
            # byte-string year for the downstream query (Python 2 code base)
            year = params['year'].encode("utf8")
            schools = get_overall_topschools(year, geo_level, geo_code, session)
        finally:
            # always release the DB session, even if the lookup fails
            session.close()
        return HttpResponse(json.dumps({'schools': schools}),
                            content_type='application/json')
    else:
        year = '2017'
        # Getting Schools from overall top schools method
        try:
            schools = get_overall_topschools(year, geo_level, geo_code, session)
        finally:
            session.close()
        return render(request, 'leaguetable/homepage.html',
                      {'schools': schools,
                       'root_geo': geo_data.root_geography()})
def get_context_data(self, *args, **kwargs):
    """Build the template context for a geography profile page.

    Loads the profile via the WAZIMAP.profile_builder callable configured
    in settings, attaches school coordinates for the requested year, and
    embeds a JSON copy of the profile data for client-side use.

    :raises ValueError: if WAZIMAP.profile_builder is not configured.
    """
    page_context = {}
    session = get_session()
    try:
        request = self.request
        # explicit ?year= wins; otherwise fall back to the view's default year
        year = request.GET.get('year') or self.year

        # load the profile
        profile_method = settings.WAZIMAP.get('profile_builder', None)
        self.profile_name = settings.WAZIMAP.get('default_profile', 'default')
        if not profile_method:
            raise ValueError("You must define WAZIMAP.profile_builder in settings.py")
        profile_method = import_string(profile_method)
        profile_data = profile_method(self.geo, self.profile_name, self.request, year)

        profile_data['geography'] = self.geo.as_dict_deep()
        coordinates, totalschools = get_schools_coordinates(self.geo, year, session)
        profile_data['coordinates'] = json.dumps(coordinates, cls=DjangoJSONEncoder)
        profile_data['totalschools'] = totalschools
        profile_data['year'] = year

        profile_data = enhance_api_data(profile_data)
        page_context.update(profile_data)

        # serialised copy of the profile for in-page JavaScript
        profile_data_json = SafeString(json.dumps(profile_data, cls=DjangoJSONEncoder))
        page_context.update({
            'profile_data_json': profile_data_json
        })

        # is this a head-to-head view?
        page_context['head2head'] = 'h2h' in self.request.GET
    finally:
        session.close()

    return page_context
def raw_data_for_geos(self, geos, release=None, year=None):
    """Fetch raw column values for the given geographies.

    Returns a dict keyed on '<geo_level>-<geo_code>' mapping to
    {'estimate': {col: value}, 'error': {col: 0}}.
    """
    # seed an empty estimate/error pair for every requested geography
    data = {}
    for geo in geos:
        data['%s-%s' % (geo.geo_level, geo.geo_code)] = {
            'estimate': {},
            'error': {},
        }

    db_table = self.get_db_table(release=release, year=year)
    columns = self.columns(db_table)
    model = db_table.model

    session = get_session()
    try:
        # one disjunction of (level, code, version) triples -> one query
        matchers = [and_(model.geo_level == g.geo_level,
                         model.geo_code == g.geo_code,
                         model.geo_version == g.version)
                    for g in geos]
        rows = session.query(model).filter(or_(*matchers)).all()

        for row in rows:
            entry = data['%s-%s' % (row.geo_level, row.geo_code)]
            for col in columns.keys():
                entry['estimate'][col] = getattr(row, col)
                entry['error'][col] = 0
    finally:
        session.close()

    return data
def get_profile(geo, profile_name, request):
    """Assemble the full profile data dict for a geography.

    Each section in PROFILE_SECTIONS is built by a get_<section>_profile
    function looked up in this module's globals, then merged with the same
    section computed for the comparative (parent) geographies.
    """
    session = get_session()

    try:
        comp_geos = geo_data.get_comparative_geos(geo)
        data = {}
        sections = list(PROFILE_SECTIONS)
        if geo.geo_level not in ['country', 'province', 'municipality']:
            pass
            # Raise error as we don't have this data

        for section in sections:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session)

                # get profiles for province and/or country
                for comp_geo in comp_geos:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(comp_geo, session),
                                comp_geo.geo_level)

        # show the 5 largest groups on their own; bucket the rest as 'Other'
        group_remainder(data['households']['type_of_dwelling_distribution'], 5)
        group_remainder(data['service_delivery']['water_source_distribution'], 5)
        group_remainder(
            data['service_delivery']['toilet_facilities_distribution'], 5)
        return data
    finally:
        session.close()
def raw_data_for_geos(self, geos): data = {} # group by geo level geos = sorted(geos, key=lambda g: g.geo_level) for geo_level, geos in groupby(geos, lambda g: g.geo_level): geo_codes = [g.geo_code for g in geos] # initial values for geo_code in geo_codes: data['%s-%s' % (geo_level, geo_code)] = { 'estimate': {}, 'error': {} } session = get_session() try: geo_values = None rows = session\ .query(self.model)\ .filter(self.model.c.geo_level == geo_level)\ .filter(self.model.c.geo_code.in_(geo_codes))\ .all() for row in rows: geo_values = data['%s-%s' % (geo_level, row.geo_code)] for col in self.columns.iterkeys(): geo_values['estimate'][col] = getattr(row, col) geo_values['error'][col] = 0 finally: session.close() return data
def get(self, request, geo_level, geo_code):
    """Return the dynamic profile data for a geography as a DRF response.

    Resolves the geography and release year from query parameters, picks
    the configured indicator builder (falling back to BuildIndicator), and
    builds the profile section inside the appropriate dataset context.
    """
    results = {}
    version = self.request.GET.get("geo_version",
                                   settings.WAZIMAP["default_geo_version"])
    geography = Geography.objects.get(geo_level=geo_level,
                                      geo_code=geo_code,
                                      version=version)
    year = self.request.GET.get("release",
                                geo_data.primary_release_year(geography))
    # NOTE(review): "geograhy" looks like a typo for "geography", but API
    # consumers may already depend on this key -- confirm before renaming.
    results["geograhy"] = geography.as_dict_deep()

    if settings.WAZIMAP["latest_release_year"] == year:
        year = "latest"

    # use the project's configured indicator builder if one is declared
    build_indicator = settings.DYNAMIC_PROFILE_INDICATOR
    if build_indicator:
        module = importlib.import_module(build_indicator["path"])
        build_indicator = getattr(module, build_indicator["class"])
    else:
        build_indicator = BuildIndicator

    session = get_session()
    try:
        with dataset_context(year=year):
            section = Section(geography, session)
            section_results = section.build(BuildProfile, build_indicator)
            results["data"] = section_results
    finally:
        # the original never closed the session; release it unconditionally
        session.close()

    return Response(results, status=status.HTTP_200_OK)
def raw_data_for_geos(self, geos, release=None, year=None):
    """Collect raw table values for *geos* from the release/year table.

    The result maps '<geo_level>-<geo_code>' keys to a dict holding the
    per-column 'estimate' values and zeroed 'error' margins.
    """
    data = {'%s-%s' % (geo.geo_level, geo.geo_code): {'estimate': {}, 'error': {}}
            for geo in geos}

    db_table = self.get_db_table(release=release, year=year)
    columns = self.columns(db_table)
    model = db_table.model

    session = get_session()
    try:
        # single query matching any requested (level, code, version) triple
        query = session.query(model).filter(
            or_(and_(model.geo_level == g.geo_level,
                     model.geo_code == g.geo_code,
                     model.geo_version == g.version)
                for g in geos))
        for row in query.all():
            target = data['%s-%s' % (row.geo_level, row.geo_code)]
            for col in columns.keys():
                target['estimate'][col] = getattr(row, col)
                target['error'][col] = 0
    finally:
        session.close()

    return data
def get_profile(geo_code, geo_level, profile_name=None):
    """Build the complete profile dict for a geography code/level pair.

    Sections are produced by get_<section>_profile functions found in this
    module's globals, then merged with the summary (province/country) geos.
    """
    session = get_session()

    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        data = {}
        sections = list(PROFILE_SECTIONS)
        if geo_level not in ['country', 'province', 'district', 'municipality']:
            pass
            # Raise error as we don't have this data

        for section in sections:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo_code, geo_level, session)

                # get profiles for province and/or country
                for level, code in geo_summary_levels:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(code, level, session), level)

        return data
    finally:
        session.close()
def reverse(apps, schema_editor):
    """ Drop the new geo_version column from all data tables """
    session = get_session()
    inspector = inspect(session.bind)

    try:
        # itervalues(): Python 2 dict iteration
        for data_table in DATA_TABLES.itervalues():
            db_model = data_table.model
            table = db_model.__table__

            # remove the primary key constraint, if any
            pk = inspector.get_pk_constraint(table.name)['name']
            if pk:
                session.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (table.name, pk))

            # drop the new column
            session.execute("ALTER TABLE %s DROP COLUMN geo_version" % table.name)

            # add the old pk constraint back, without geo_version
            pk = table.primary_key
            pk.columns.remove(table.c.geo_version)
            session.execute(AddConstraint(pk))
            session.commit()
    finally:
        session.close()
def get_profile(geo, profile_name, request):
    """Assemble the full profile data dict for a geography.

    Sections come from get_<section>_profile functions in this module's
    globals; each is merged with the comparative (parent) geographies.
    """
    session = get_session()

    try:
        comp_geos = geo_data.get_comparative_geos(geo)
        data = {}
        sections = list(PROFILE_SECTIONS)
        if geo.geo_level not in ['country', 'province', 'municipality']:
            pass
            # Raise error as we don't have this data

        for section in sections:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session)

                # get profiles for province and/or country
                for comp_geo in comp_geos:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(comp_geo, session),
                                comp_geo.geo_level)

        # keep the 5 largest groups; bucket the rest as 'Other'
        group_remainder(data['households']['type_of_dwelling_distribution'], 5)
        group_remainder(data['service_delivery']['water_source_distribution'], 5)
        group_remainder(data['service_delivery']['toilet_facilities_distribution'], 5)
        return data
    finally:
        session.close()
def raw_data_for_geos(self, geos): data = {} # group by geo level geos = sorted(geos, key=lambda g: g.geo_level) for geo_level, geos in groupby(geos, lambda g: g.geo_level): geo_codes = [g.geo_code for g in geos] # initial values for geo_code in geo_codes: data['%s-%s' % (geo_level, geo_code)] = { 'estimate': {}, 'error': {}} session = get_session() try: geo_values = None rows = session\ .query(self.model)\ .filter(self.model.c.geo_level == geo_level)\ .filter(self.model.c.geo_code.in_(geo_codes))\ .all() for row in rows: geo_values = data['%s-%s' % (geo_level, row.geo_code)] for col in self.columns.iterkeys(): geo_values['estimate'][col] = getattr(row, col) geo_values['error'][col] = 0 finally: session.close() return data
def get_census_profile(geo_code, geo_level, profile_name=None):
    """Build the census profile for a geography and merge in summary geos.

    After the session is closed, the largest language/ethnic groups are
    kept and the remainder grouped as 'Other' (except at vdc level).
    """
    session = get_session()

    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        data = {}

        for section in PROFILE_SECTIONS:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo_code, geo_level, session)

                # get profiles for province and/or country
                for level, code in geo_summary_levels:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(code, level, session), level)
    finally:
        session.close()

    if geo_level != 'vdc':
        # keep the 10 largest groups; bucket the rest as 'Other'
        group_remainder(data['demographics']['language_distribution'], 10)
        group_remainder(data['demographics']['ethnic_distribution'], 10)

    return data
def store_values(self):
    """Import CSV rows into the data table.

    Each row becomes a model instance keyed by geography; 'no data' totals
    are stored as NULL, Float tables keep 2 decimals, other tables round
    to int.  Rows are flushed every 100 records and committed at the end
    (unless this is a dry run).
    """
    session = get_session()
    count = 0
    try:
        for row in self.reader:
            model_row = {}
            count += 1
            geo_code, geo_level = self.get_geo_data(row['geography'])
            model_row['geo_version'] = self.geo_version
            model_row['geo_code'] = geo_code
            model_row['geo_level'] = geo_level
            for col in self.columns:
                model_row[col] = row[col]
            if row['total'] == 'no data':
                model_row['total'] = None
            else:
                model_row['total'] = round(float(
                    row['total']), 2) if self.value_type == 'Float' else int(
                        round(float(row['total'])))
            # progress reporting via the command's stdout (was a bare
            # Python 2 `print`), consistent with the other loaders
            self.stdout.write("%s" % model_row)
            entry = self.table.model(**model_row)
            if not self.dryrun:
                session.add(entry)
                if count % 100 == 0:
                    session.flush()

        if not self.dryrun:
            session.commit()
    finally:
        # the original leaked the session if an exception occurred mid-import
        session.close()
def get_census_profile(geo, profile_name, request):
    """Build the census profile for *geo* across all configured sections.

    SECTIONS maps categories to lists of profile names; each is resolved
    to a get_<section>_profile function in this module's globals.
    """
    # normalise the version to a string for downstream lookups
    geo.version = str(geo.version)
    session = get_session()

    try:
        data = {}
        sections = []
        selected_sections = []

        for cat in SECTIONS:
            sections.extend(SECTIONS[cat]['profiles'])

        for section in sections:
            section = section.lower().replace(' ', '_')
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session)

        # tweaks to make the data nicer
        # show X largest groups on their own and group the rest as 'Other'
        # NOTE(review): `sections` holds the original-cased profile names;
        # if they are capitalised this lowercase check never matches -- confirm.
        if 'households' in sections:
            group_remainder(data['households']['roofing_material_distribution'], 5)
            group_remainder(data['households']['wall_material_distribution'], 5)

        data['all_sections'] = SECTIONS
        # selected_sections is never populated above, so this always falls
        # back to every section
        if (selected_sections == []):
            selected_sections = sections
        data['raw_selected_sections'] = selected_sections
        data['selected_sections'] = [x.replace(' ', '_').lower()
                                     for x in selected_sections]
        return data
    finally:
        session.close()
def forwards(apps, schema_editor):
    """ Ensure all data tables have the new geo_version column, with a default of '' """
    session = get_session()
    inspector = inspect(session.bind)

    try:
        # itervalues(): Python 2 dict iteration
        for data_table in DATA_TABLES.itervalues():
            db_model = data_table.model
            table = db_model.__table__

            # skip tables that have already been migrated
            cols = [c['name'] for c in inspector.get_columns(table.name)]
            if 'geo_version' in cols:
                continue

            # remove the old primary key constraint, if any
            pk = inspector.get_pk_constraint(table.name)['name']
            if pk:
                session.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (table.name, pk))

            # add the new column
            session.execute(
                "ALTER TABLE %s ADD COLUMN geo_version VARCHAR(100) DEFAULT ''" % table.name)

            # add the correct new constraint
            session.execute(AddConstraint(table.primary_key))
            session.commit()
    finally:
        session.close()
def get_census_profile(geo_code, geo_level, profile_name=None):
    """Build the census profile for a geography, merging in the summary
    (province/country) geographies."""
    session = get_session()

    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        data = {}

        for section in PROFILE_SECTIONS:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo_code, geo_level, session)

                # get profiles for province and/or country
                for level, code in geo_summary_levels:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(code, level, session), level)

        # tweaks to make the data nicer
        # show X largest groups on their own and group the rest as 'Other'
        #group_remainder(data['households']['roofing_material_distribution'], 5)
        #group_remainder(data['households']['wall_material_distribution'], 5)

        return data
    finally:
        session.close()
def get_census_profile(geo_code, geo_level, profile_name=None):
    """Build the census profile for a geography, logging the span of the
    database work for diagnostics."""
    logger.info('Begin of transaction for {}: {}'.format(geo_level, geo_code))
    session = get_session()

    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        data = {}

        for section in PROFILE_SECTIONS:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo_code, geo_level, session)

                # get profiles for province and/or country
                for level, code in geo_summary_levels:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(code, level, session), level)

        return data
    finally:
        # log the end even when an exception propagates
        logger.info('End of transaction for {}: {}'.format(
            geo_level, geo_code))
        session.close()
def get_elections_profile(geo_code, geo_level):
    """Build election results per election, merged with the summary geos.

    Returns an OrderedDict keyed on the election name (lowercased,
    underscored), preserving ELECTIONS order.
    """
    data = OrderedDict()
    session = get_session()
    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        for election in ELECTIONS:
            section = election['name'].lower().replace(' ', '_')
            data[section] = get_election_data(geo_code, geo_level, election, session)

            # get profiles for province and/or country
            for level, code in geo_summary_levels:
                # merge summary profile into current geo profile
                merge_dicts(data[section],
                            get_election_data(code, level, election, session), level)

            # tweaks to make the data nicer
            # show 8 largest parties on their own and group the rest as 'Other'
            group_remainder(data[section]['party_distribution'], 9)

        if geo_level == 'country':
            add_elections_media_coverage(data)

        return data
    finally:
        session.close()
def get_census_profile(geo, profile_name, request):
    """Build the census profile for *geo*, including the primary release
    year and the afrobarometer section."""
    # normalise the version to a string for downstream lookups
    geo.version = str(geo.version)
    session = get_session()
    year = current_context().get('year')

    try:
        data = {}
        sections = []
        selected_sections = []

        for cat in SECTIONS:
            sections.extend(SECTIONS[cat]['profiles'])

        for section in sections:
            section = section.lower().replace(' ', '_')
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session)

        # tweaks to make the data nicer
        # show X largest groups on their own and group the rest as 'Other'
        # NOTE(review): `sections` holds the original-cased profile names;
        # if they are capitalised this lowercase check never matches -- confirm.
        if 'households' in sections:
            group_remainder(data['households']['roofing_material_distribution'], 5)
            group_remainder(data['households']['wall_material_distribution'], 5)

        data['all_sections'] = SECTIONS
        data['primary_release_year'] = year
        # selected_sections is never populated above, so this always falls
        # back to every section
        if (selected_sections == []):
            selected_sections = sections
        data['raw_selected_sections'] = selected_sections
        data['selected_sections'] = [x.replace(' ', '_').lower()
                                     for x in selected_sections]
        data['afrobarometer'] = get_afrobarometer_profile(geo, session)
        return data
    finally:
        session.close()
def get_timeseries_profile(geo, profile_name, request):
    """Build the timeseries profile for *geo* and merge comparative geos.

    :raises ValueError: if a comparative geo's data cannot be merged into
        a section (logged fatally first).
    """
    session = get_session()
    try:
        comparative_geos = geo_data.get_comparative_geos(geo)
        data = {}

        for section in PROFILE_SECTIONS:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(
                    geo, session, request)  # calling get_PROFILE_SECTIONS_profile

                # get profiles for province and/or country
                for comp_geo in comparative_geos:
                    try:
                        merge_dicts(data[section],
                                    func(comp_geo, session, request),
                                    comp_geo.geo_level)
                    except KeyError as e:
                        msg = "Error merging data into %s for section '%s' from %s: KeyError: %s" % (
                            geo.geoid, section, comp_geo.geoid, e)
                        log.fatal(msg, exc_info=e)
                        raise ValueError(msg)
    finally:
        session.close()

    return data
def store_values(self):
    """Import spreadsheet rows into the per-geo-level data tables.

    Each (geo, category) pair becomes one model row; '-' values are stored
    as 0.  Rows are flushed every 100 records and committed at the end
    (unless this is a dry run).
    """
    session = get_session()
    count = 0
    try:
        for geo_name, values in self.read_rows():
            count += 1
            geo_level, geo_code = self.determine_geo_id(geo_name)
            self.stdout.write("%s-%s" % (geo_level, geo_code))

            for category, value in zip(self.categories, values):
                # prepare the dict of args to pass to the db model for this row
                kwargs = {
                    'geo_level': geo_level,
                    'geo_code': geo_code,
                }
                kwargs.update(dict((f, v) for f, v in zip(self.fields, category)))
                if value == '-':
                    value = '0'
                kwargs['total'] = round(float(value.replace(',', '')))

                # create and add the row
                self.debug(kwargs)
                entry = self.table.get_model(geo_level)(**kwargs)
                if not self.dryrun:
                    session.add(entry)
                    if count % 100 == 0:
                        session.flush()

        if not self.dryrun:
            session.commit()
    finally:
        # the original leaked the session if an exception occurred mid-import
        session.close()
def get_profile(geo, profile_name, request):
    """Build the profile data for *geo*.

    Comparative-geo merging is currently disabled (kept below, commented
    out, for reference).
    """
    session = get_session()
    try:
        comparative_geos = geo_data.get_comparative_geos(geo)
        data = {}
        data["primary_release_year"] = current_context().get("year")
        sections = list(PROFILE_SECTIONS)

        for section in sections:
            function_name = "get_%s_profile" % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session)

                # if section == "indicator":
                #     # get profiles for comparative geometries
                #     for comp_geo in comparative_geos:
                #         try:
                #             merge_dicts(
                #                 data[section],
                #                 func(comp_geo, session),
                #                 comp_geo.geo_level,
                #             )
                #         except KeyError as e:
                #             msg = (
                #                 "Error merging data into %s for section '%s' from %s: KeyError: %s"
                #                 % (geo.geoid, section, comp_geo.geoid, e)
                #             )
                #             log.fatal(msg, exc_info=e)
                #             raise ValueError(msg)
    finally:
        session.close()

    return data
def store_values(self):
    """Import CSV rows (already keyed by geo) into the data table.

    'no data' totals become NULL; Float tables keep one decimal, other
    tables round to int.  Commits at the end unless this is a dry run.
    """
    session = get_session()
    count = 0
    try:
        for row in self.reader:
            count += 1
            row['geo_version'] = self.geo_version
            if row['total'] == 'no data':
                row['total'] = None
            else:
                row['total'] = round(float(row['total']), 1) if self.value_type == 'Float' else int(
                    round(float(row['total'])))
            self.stdout.write("%s-%s" % (row['geo_level'], row['geo_code']))
            entry = self.table.model(**row)
            if not self.dryrun:
                session.add(entry)
                if count % 100 == 0:
                    session.flush()

        if not self.dryrun:
            session.commit()
    finally:
        # the original leaked the session if an exception occurred mid-import
        session.close()
def embed(request, geo_level, geo_code):
    """Embeddable view listing best and worst ranked schools for a geography.

    The ranking column is chosen from the geo level (national, regional or
    district rank); unknown levels are rejected explicitly.
    """
    # Getting the session
    session = get_session()
    schools = {}
    schools_table = Base.metadata.tables['secondary_schools']

    # Choosing sorting option
    if geo_level == "country":
        rank_column = schools_table.c.national_rank_all
    elif geo_level == "region":
        rank_column = schools_table.c.regional_rank_all
    elif geo_level == "district":
        rank_column = schools_table.c.district_rank_all
    else:
        # the original fell through with rank_column unbound (NameError)
        session.close()
        raise ValueError("Unsupported geo_level: %s" % geo_level)

    try:
        # Fetching schools
        best_schools = session.query(schools_table)\
            .filter(schools_table.c.geo_level == geo_level)\
            .filter(schools_table.c.geo_code == geo_code)\
            .order_by(asc(cast(rank_column, Integer)))\
            .all()

        worst_schools = session.query(schools_table)\
            .filter(schools_table.c.geo_level == geo_level)\
            .filter(schools_table.c.geo_code == geo_code)\
            .order_by(desc(cast(rank_column, Integer)))\
            .all()
    finally:
        # the original never closed the session
        session.close()

    schools['best_schools'] = best_schools
    schools['worst_schools'] = worst_schools

    return render(request, 'embed.html', {'schools': schools})
def raw_data_for_geos(self, geos):
    """Fetch raw column values for the given geographies in one query.

    Returns {'<geo_level>-<geo_code>': {'estimate': {col: value},
    'error': {col: 0}}}.
    """
    # initial values
    data = {('%s-%s' % (geo.geo_level, geo.geo_code)): {
        'estimate': {},
        'error': {}
    } for geo in geos}

    session = get_session()
    try:
        geo_values = None
        # one disjunction over (level, code, version) triples
        rows = session\
            .query(self.model)\
            .filter(or_(and_(
                self.model.geo_level == g.geo_level,
                self.model.geo_code == g.geo_code,
                self.model.geo_version == g.version)
                for g in geos))\
            .all()

        for row in rows:
            geo_values = data['%s-%s' % (row.geo_level, row.geo_code)]
            # iterkeys(): Python 2 dict iteration
            for col in self.columns.iterkeys():
                geo_values['estimate'][col] = getattr(row, col)
                geo_values['error'][col] = 0
    finally:
        session.close()

    return data
def forwards(apps, schema_editor):
    """ Ensure all data tables have the new geo_version column, with a default of '' """
    session = get_session()
    inspector = inspect(session.bind)

    try:
        # itervalues(): Python 2 dict iteration
        for data_table in DATA_TABLES.itervalues():
            db_model = data_table.model
            table = db_model.__table__

            # skip tables that have already been migrated
            cols = [c['name'] for c in inspector.get_columns(table.name)]
            if 'geo_version' in cols:
                continue

            # remove the old primary key constraint, if any
            pk = inspector.get_pk_constraint(table.name)['name']
            if pk:
                session.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (table.name, pk))

            # add the new column
            session.execute("ALTER TABLE %s ADD COLUMN geo_version VARCHAR(100) DEFAULT ''" % table.name)

            # add the correct new constraint
            session.execute(AddConstraint(table.primary_key))
            session.commit()
    finally:
        session.close()
def raw_data_for_geos(self, geos):
    """Fetch raw column values for the given geographies in one query.

    Returns {'<geo_level>-<geo_code>': {'estimate': {col: value},
    'error': {col: 0}}}.
    """
    # initial values
    data = {('%s-%s' % (geo.geo_level, geo.geo_code)): {
        'estimate': {},
        'error': {}} for geo in geos}

    session = get_session()
    try:
        geo_values = None
        # one disjunction over (level, code, version) triples
        rows = session\
            .query(self.model)\
            .filter(or_(and_(
                self.model.geo_level == g.geo_level,
                self.model.geo_code == g.geo_code,
                self.model.geo_version == g.version)
                for g in geos))\
            .all()

        for row in rows:
            geo_values = data['%s-%s' % (row.geo_level, row.geo_code)]
            # iterkeys(): Python 2 dict iteration
            for col in self.columns.iterkeys():
                geo_values['estimate'][col] = getattr(row, col)
                geo_values['error'][col] = 0
    finally:
        session.close()

    return data
def get_census_profile(geo_code, geo_level, profile_name=None):
    """Build the census profile for a geography, merging in the summary
    (province/country) geographies."""
    session = get_session()

    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code, geo_level)
        data = {}

        for section in PROFILE_SECTIONS:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo_code, geo_level, session)

                # get profiles for province and/or country
                for level, code in geo_summary_levels:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(code, level, session), level)

        # tweaks to make the data nicer
        # show X largest groups on their own and group the rest as 'Other'
        group_remainder(data['households']['roofing_material_distribution'], 5)
        group_remainder(data['households']['wall_material_distribution'], 5)

        return data
    finally:
        session.close()
def get_profile(geo, profile_name, request, year):
    """Return the schools profile for *geo* for the given year."""
    session = get_session()
    try:
        return {'schools': get_schools_profile(geo, session, year)}
    finally:
        session.close()
def _build_model_from_fields(self, fields, table_name, geo_level=None):
    """
    Generates an ORM model for arbitrary census fields by geography.

    :param list fields: the census fields in `api.models.tables.FIELD_TABLE_FIELDS`, e.g. ['highest educational level', 'type of sector']
    :param str table_name: the name of the database table
    :param str geo_level: one of the geographics levels defined in `api.base.geo_levels`, e.g. 'province', or None if the table doesn't use them
    :return: ORM model class containing the given fields with type String(128), a 'total' field with type Integer and '%(geo_level)s_code' with type ForeignKey('%(geo_level)s.code')
    :rtype: Model
    """
    # models are cached per table name
    if table_name in _census_table_models:
        return _census_table_models[table_name]

    # We build this array in a particular order, with the geo-related fields first,
    # to ensure that SQLAlchemy creates the underlying table with the compound primary
    # key columns in the correct order:
    #
    #  geo_level, geo_code, field, [field, field, ...]
    #
    # This means postgresql will use the first two elements of the compound primary
    # key -- geo_level and geo_code -- when looking up values for a particular
    # geography. This saves us from having to create a secondary index.
    table_args = []

    if geo_level:
        # primary/foreign keys
        table_args.append(
            Column(
                "%s_code" % geo_level,
                String(10),
                ForeignKey("%s.code" % geo_level),
                primary_key=True,
                index=True
            )
        )
    else:
        # will form a compound primary key on the fields, and the geo id
        table_args.append(Column("geo_level", String(15), nullable=False, primary_key=True))
        table_args.append(Column("geo_code", String(10), nullable=False, primary_key=True))

    # Now add the columns
    table_args.extend(Column(field, String(128), primary_key=True) for field in fields)

    # and the value column
    table_args.append(Column("total", Integer, nullable=False))

    # create the table model
    class Model(Base):
        __table__ = Table(table_name, Base.metadata, *table_args)
    _census_table_models[table_name] = Model

    # ensure it exists in the DB
    session = get_session()
    try:
        Model.__table__.create(session.get_bind(), checkfirst=True)
    finally:
        session.close()

    return Model
def get_context_data(self, *args, **kwargs):
    """Build the context for a single school's profile page.

    Looks up the school's results over the years, its record for the
    view's year, and its coordinates, then resolves the region it belongs
    to and embeds the profile data (plus a JSON copy) in the page context.

    :raises Http404: if the school's region cannot be resolved.
    """
    session = get_session()
    page_context = {}
    code = self.kwargs.get('code', None)
    school_table = Base.metadata.tables['secondary_school']

    try:
        # Fetch school performance over the years
        school_results = session.query(school_table)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.code == code)\
            .all()

        # Fetch the school's record for this view's year
        school = session.query(school_table)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.year_of_result == self.year)\
            .filter(school_table.c.code == code)\
            .one()

        # get school coordinates, skipping unknown positions
        coordinates = session.query(school_table.c.code,
                                    school_table.c.name,
                                    school_table.c.longitude,
                                    school_table.c.latitude)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.code == code)\
            .filter(school_table.c.year_of_result == self.year)\
            .filter(school_table.c.longitude != 'UNKNOWN')\
            .filter(school_table.c.latitude != 'UNKNOWN')\
            .all()

        # Fetch the region where the school is
        try:
            self.geo_level = 'region'
            self.geo_code = school.geo_code
            version = '2009'
            self.geo = geo_data.get_geography(self.geo_code, self.geo_level,
                                              version)
        except (ValueError, Exception):
            raise Http404

        profile_data = {}
        profile_data['geography'] = self.geo.as_dict_deep()
        profile_data['coordinates'] = json.dumps(coordinates,
                                                 cls=DjangoJSONEncoder)
        profile_data['school'] = school
        profile_data['year'] = self.year
        profile_data['school_results'] = school_results

        profile_data = enhance_api_data(profile_data)
        page_context.update(profile_data)

        # serialised copy of the profile for in-page JavaScript
        profile_data_json = SafeString(
            json.dumps(profile_data, cls=DjangoJSONEncoder))
        page_context.update({'profile_data_json': profile_data_json})
    finally:
        # the original only closed the session on the success path,
        # leaking it when .one() raised or the geography lookup failed
        session.close()

    return page_context
def get_context_data(self, *args, **kwargs):
    """Build the context for a single school's profile page.

    Fetches the school's multi-year results, its record for the view's
    year, and its coordinates, resolves the region it belongs to, and
    embeds the profile data (plus a JSON copy) in the page context.

    :raises Http404: if the school's region cannot be resolved.
    """
    session = get_session()
    page_context = {}
    code = self.kwargs.get('code', None)
    school_table = Base.metadata.tables['secondary_school']

    try:
        # Fetch school performance over the years
        school_results = session.query(school_table)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.code == code)\
            .all()

        # Fetch the school's record for this view's year
        school = session.query(school_table)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.year_of_result == self.year)\
            .filter(school_table.c.code == code)\
            .one()

        # get school coordinates, skipping unknown positions
        coordinates = session.query(school_table.c.code,
                                    school_table.c.name,
                                    school_table.c.longitude,
                                    school_table.c.latitude)\
            .filter(school_table.c.geo_level == "region")\
            .filter(school_table.c.code == code)\
            .filter(school_table.c.year_of_result == self.year)\
            .filter(school_table.c.longitude != 'UNKNOWN')\
            .filter(school_table.c.latitude != 'UNKNOWN')\
            .all()

        # Fetch the region where the school is
        try:
            self.geo_level = 'region'
            self.geo_code = school.geo_code
            version = '2009'
            self.geo = geo_data.get_geography(self.geo_code, self.geo_level,
                                              version)
        except (ValueError, Exception):
            raise Http404

        profile_data = {}
        profile_data['geography'] = self.geo.as_dict_deep()
        profile_data['coordinates'] = json.dumps(coordinates,
                                                 cls=DjangoJSONEncoder)
        profile_data['school'] = school
        profile_data['year'] = self.year
        profile_data['school_results'] = school_results

        profile_data = enhance_api_data(profile_data)
        page_context.update(profile_data)

        # serialised copy of the profile for in-page JavaScript
        profile_data_json = SafeString(json.dumps(profile_data,
                                                  cls=DjangoJSONEncoder))
        page_context.update({
            'profile_data_json': profile_data_json
        })
    finally:
        # the original only closed the session on the success path,
        # leaking it when .one() raised or the geography lookup failed
        session.close()

    return page_context
def get_profile(geo, profile_name, request):
    """Return demographics plus afrobarometer data for *geo*."""
    year = current_context().get('year')
    session = get_session()
    try:
        profile = {
            'demographics': get_population(geo, session),
            'primary_release_year': year,
            'afrobarometer': get_afrobarometer_profile(geo, session),
        }
        return profile
    finally:
        session.close()
def embed(request, geo_level, geo_code):
    """Render the embeddable league table of top schools for 2017."""
    year = '2017'
    schools = []
    session = get_session()
    try:
        schools = get_overall_topschools(year, geo_level, geo_code, session)
    finally:
        session.close()
    context = {'schools': schools}
    return render(request, 'leaguetable/embed.html', context)
def get_profile(geo, profile_name, request):
    """Build the full profile for *geo*, tracking which dataset (WC or ZA)
    should drive the display."""
    session = get_session()

    try:
        comp_geos = geo_data.get_comparative_geos(geo)
        data = {}
        sections = list(PROFILE_SECTIONS)
        if geo.geo_level not in [
                'country', 'province', 'district', 'municipality'
        ]:
            pass
            # Raise error as we don't have this data

        """
        The following is temporary and enables us to determine what to
        display for geos:
        Within WC: All indicators, with WC as root comparisson geo
        Outside WC: Some indicators, with ZA as root comparrison geo
        This is beacause Wazimap expects data for all geos.
        This will be removed once we have imported all the data.
        """
        # There are datasets with only WC information
        display_profile = 'WC' if (
            geo.geo_code == 'WC'
            or 'WC' in [cg.geo_code for cg in comp_geos]) else 'ZA'
        data['display_profile'] = display_profile
        data['primary_release_year'] = current_context().get('year')

        for section in sections:
            function_name = 'get_%s_profile' % section
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session, display_profile)

                # get profiles for province and/or country
                for comp_geo in comp_geos:
                    # merge summary profile into current geo profile
                    merge_dicts(
                        data[section],
                        func(comp_geo, session, display_profile, comparative=True),
                        comp_geo.geo_level)

        # Make data look nicer on profile page
        group_remainder(data['demographics']['youth_population_by_language'], 11)

        return data
    finally:
        session.close()
def specific_school(request, code):
    """Render the detail page for a single school identified by *code*."""
    session = get_session()
    schools_table = Base.metadata.tables['secondary_schools']
    school = (session.query(schools_table)
              .filter(schools_table.c.geo_level == "country")
              .filter(schools_table.c.code == code)
              .one())
    return render(request, 'specific_school.html', {'school': school})
def schools(request):
    """Render the list of all secondary schools at country level,
    ordered by national rank (rank column is stored as text, hence the
    cast to Integer for numeric ordering)."""
    session = get_session()
    try:
        table = Base.metadata.tables['secondary_schools']
        schools = session.query(table)\
            .filter(table.c.geo_level == "country")\
            .order_by(asc(cast(table.c.national_rank_all, Integer)))\
            .all()
    finally:
        # Bug fix: the original never closed the session, leaking a DB
        # connection on every request.
        session.close()
    return render(request, 'schools.html', {'schools': schools})
def store_values(self):
    """Read spreadsheet rows and store one db row per category/value pair.

    Duplicate keys with the same total are skipped with a message;
    duplicate keys with a *different* total abort the import. Rows are
    flushed every 100 geographies and committed at the end unless
    `self.dryrun` is set.
    """
    session = get_session()
    count = 0
    stored_values = {}
    try:
        for geo_name, values in self.read_rows():
            # A fully-empty row marks the end of the data region.
            if all(not val for val in values):
                break
            count += 1
            geo_level, geo_code = self.determine_geo_id(geo_name)
            self.stdout.write("%s-%s" % (geo_level, geo_code))
            for category, value in zip(self.categories, values):
                # prepare the dict of args to pass to the db model for this row
                kwargs = {
                    'geo_level': geo_level,
                    'geo_code': geo_code,
                    'geo_version': self.geo_version,
                }
                kwargs.update(
                    dict((f, v) for f, v in zip(self.fields, category)))
                # '-' in the source sheet means zero.
                if value == '-':
                    value = '0'
                total = round(float(value.replace(',', '')))
                stored_key = tuple(sorted(list(kwargs.items())))
                if stored_key in stored_values:
                    if stored_values[stored_key] == total:
                        self.stdout.write(
                            "Skipping already-added value for key %r"
                            % list(stored_key))
                        continue
                    else:
                        raise Exception(
                            "Different value %r != %r for duplicate key %r"
                            % (stored_values[stored_key], total, stored_key))
                stored_values[stored_key] = total
                kwargs['total'] = total
                # create and add the row
                self.debug(kwargs)
                entry = self.table.model(**kwargs)
                if not self.dryrun:
                    session.add(entry)
                    if count % 100 == 0:
                        session.flush()
        if not self.dryrun:
            session.commit()
    finally:
        # Bug fix: close the session even when the import raises (e.g. on
        # conflicting duplicate keys); the original leaked it on error.
        session.close()
def index(request):
    """Homepage: national league tables of best/worst schools, split by
    whether the school entered more or fewer than 40 candidates.

    Fixes over the original: the four copy-pasted queries are collapsed
    into one parameterised helper, the session is closed in a finally
    block (it was never closed before), and the unused `template_name`
    local is removed.
    """
    session = get_session()
    schools = {}
    geo_level = "country"
    geo_code = "TZ"
    # Sorting will only be done using national_rank_all, as regional and
    # district ranks are unknown for some results, esp. historical ones.
    table = Base.metadata.tables['secondary_school']
    rank_column = table.c.national_rank_all

    def ranked(more_than_40, direction):
        # One ranked query, parameterised by the >40-candidates flag and
        # sort direction (asc = best first, desc = worst first). The rank
        # column is text in the DB, hence the cast for numeric ordering.
        return session.query(table)\
            .filter(table.c.geo_level == geo_level)\
            .filter(table.c.geo_code == geo_code)\
            .filter(table.c.more_than_40 == more_than_40)\
            .order_by(direction(cast(rank_column, Integer)))\
            .all()

    try:
        schools['best_schools_more_40'] = ranked("yes", asc)
        schools['worst_schools_more_40'] = ranked("yes", desc)
        schools['best_schools_less_40'] = ranked("no", asc)
        schools['worst_schools_less_40'] = ranked("no", desc)
    finally:
        session.close()
    return render(request, 'leaguetable/homepage.html', {
        'schools': schools,
        'root_geo': geo_data.root_geography()
    })
def get_profile(geo, profile_name, request):
    """Assemble the budget-related profile sections for *geo*."""
    session = get_session()
    try:
        # Keys are evaluated top-to-bottom, same order as the original
        # sequential assignments.
        return {
            'budget': get_equitable_allocation_data(geo, session),
            'expenditure_ceilings':
                get_county_expenditure_ceilings(geo, session),
            'conditional_grants_15_16':
                get_conditional_allocation_2015_2016(geo, session),
        }
    finally:
        session.close()
def get_profile(geo, profile_name, request):
    """Assemble the census/election profile sections for *geo*."""
    session = get_session()
    try:
        # Keys are evaluated top-to-bottom, same order as the original
        # sequential assignments.
        return {
            'demographics': get_demographics_profile(geo, session),
            'households': get_households_profile(geo, session),
            'disability': get_disabilities_profile(geo, session),
            'elections2016': get_elections2016_profile(geo, session),
        }
    finally:
        session.close()
def ensure_db_table_exists(self):
    """
    Ensure that the actual database table behind the db_table link to
    this release exists.
    """
    # Nothing to do until the link is fully wired up.
    if not (self.db_table and self.data_table and self.release):
        return

    table_link = self.data_table.get_db_table(release=self.release)

    # Create the backing table in the DB if it is not there yet;
    # checkfirst makes this a no-op when it already exists.
    session = get_session()
    try:
        table_link.model.__table__.create(session.get_bind(),
                                          checkfirst=True)
    finally:
        session.close()
def store_values(self):
    """Read spreadsheet rows and store one db row per category/value pair.

    Duplicate keys with the same total are skipped with a message;
    duplicate keys with a *different* total abort the import. Rows are
    flushed every 100 geographies and committed at the end unless
    `self.dryrun` is set.
    """
    session = get_session()
    count = 0
    stored_values = {}
    try:
        for geo_name, values in self.read_rows():
            # A fully-empty row marks the end of the data region.
            if all(not val for val in values):
                break
            count += 1
            geo_level, geo_code = self.determine_geo_id(geo_name)
            self.stdout.write("%s-%s" % (geo_level, geo_code))
            for category, value in zip(self.categories, values):
                # prepare the dict of args to pass to the db model for this row
                kwargs = {
                    'geo_level': geo_level,
                    'geo_code': geo_code,
                    'geo_version': self.geo_version,
                }
                kwargs.update(dict((f, v) for f, v in zip(self.fields, category)))
                # '-' in the source sheet means zero.
                if value == '-':
                    value = '0'
                total = round(float(value.replace(',', '')))
                stored_key = tuple(sorted(list(kwargs.items())))
                if stored_key in stored_values:
                    if stored_values[stored_key] == total:
                        self.stdout.write("Skipping already-added value for key %r" % list(stored_key))
                        continue
                    else:
                        raise Exception("Different value %r != %r for duplicate key %r" % (stored_values[stored_key], total, stored_key))
                stored_values[stored_key] = total
                kwargs['total'] = total
                # create and add the row
                self.debug(kwargs)
                entry = self.table.model(**kwargs)
                if not self.dryrun:
                    session.add(entry)
                    if count % 100 == 0:
                        session.flush()
        if not self.dryrun:
            session.commit()
    finally:
        # Bug fix: close the session even when the import raises (e.g. on
        # conflicting duplicate keys); the original leaked it on error.
        session.close()
def get_profile(geo, profile_name, request):
    """Assemble the year-aware profile sections for *geo*."""
    session = get_session()
    release_year = current_context().get('year')
    try:
        # Keys are evaluated top-to-bottom, same order as the original
        # sequential assignments.
        return {
            'primary_release_year': release_year,
            'demographics': get_demographics_profile(geo, session, release_year),
            'households': get_households_profile(geo, session, release_year),
            'disability': get_disabilities_profile(geo, session, release_year),
            'elections2016': get_elections2016_profile(geo, session),
            'afrobarometer': get_afrobarometer_profile(geo, session),
        }
    finally:
        session.close()
def schools(request):
    """Render the national league table of secondary schools for 2017,
    ordered by national rank (cast to Integer since the column is text)."""
    session = get_session()
    result_year = '2017'
    schools = []
    try:
        table = Base.metadata.tables['secondary_school']
        schools = session.query(table)\
            .filter(table.c.geo_level == "country")\
            .filter(table.c.year_of_result == result_year)\
            .order_by(asc(cast(table.c.national_rank_all, Integer)))\
            .all()
    finally:
        session.close()
    return render(request, 'leaguetable/schools.html', {'schools': schools})
def get_profile(geo, profile_name, request):
    # Build the full profile dict for `geo` by calling every
    # `get_<section>_profile` function found in this module's globals,
    # then merging the comparative (parent) geographies' data into each
    # section. Returns the assembled dict; the DB session is always closed.
    session = get_session()
    try:
        comp_geos = geo_data.get_comparative_geos(geo)
        data = {}
        sections = list(PROFILE_SECTIONS)
        if geo.geo_level not in ['country', 'province', 'district', 'municipality']:
            # Deliberate no-op for unsupported levels.
            pass
            # Raise error as we don't have this data
        """ The following is temporary and enables us to determine
            what to display for geos:
            Within WC: All indicators, with WC as root comparisson geo
            Outside WC: Some indicators, with ZA as root comparrison geo
            This is beacause Wazimap expects data for all geos. This will be
            removed once we have imported all the data.
        """
        # There are datasets with only WC information
        display_profile = 'WC' if (geo.geo_code == 'WC' or 'WC' in [cg.geo_code for cg in comp_geos]) else 'ZA'
        data['display_profile'] = display_profile
        data['primary_release_year'] = current_context().get('year')
        for section in sections:
            function_name = 'get_%s_profile' % section
            # Sections without a builder function are silently skipped.
            if function_name in globals():
                func = globals()[function_name]
                data[section] = func(geo, session, display_profile)
                # get profiles for province and/or country
                for comp_geo in comp_geos:
                    # merge summary profile into current geo profile
                    merge_dicts(data[section], func(comp_geo, session, display_profile, comparative=True), comp_geo.geo_level)
        # Make data look nicer on profile page
        group_remainder(data['demographics']['youth_population_by_language'], 11)
        return data
    finally:
        session.close()
def get_profile(geo_code, geo_level, profile_name=None):
    """Build the profile dict for a geography, merging in the summaries
    of its parent geographies for each section."""
    session = get_session()
    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code,
                                                           geo_level)
        data = {}
        for section in PROFILE_SECTIONS:
            builder = globals().get('get_%s_profile' % section)
            # Sections with no builder function defined are skipped.
            if builder is None:
                continue
            data[section] = builder(geo_code, geo_level, session)
            # Fold each parent geography's summary into this section.
            for level, code in geo_summary_levels:
                merge_dicts(data[section], builder(code, level, session),
                            level)
        return data
    finally:
        session.close()
def store_values(self):
    """Load every CSV row into the table's per-level model, flushing
    every 100 rows and committing at the end unless `self.dryrun`."""
    session = get_session()
    count = 0
    try:
        for row in self.reader:
            count += 1
            # Round to nearest number as we're expecting Integers.
            row['total'] = int(round(float(row['total'])))
            self.stdout.write("%s-%s" % (row['geo_level'], row['geo_code']))
            entry = self.table.get_model(row['geo_level'])(**row)
            if not self.dryrun:
                session.add(entry)
                if count % 100 == 0:
                    session.flush()
        if not self.dryrun:
            session.commit()
    finally:
        # Bug fix: close the session even when a row fails to parse or
        # insert; the original leaked it on any exception.
        session.close()
def store_values(self):
    """Load CSV rows, optionally clamping per-geo category totals to 100.

    'no data' totals become NULL; Float tables keep one decimal and are
    stored as strings. When `self.add_to_100` is set, running totals per
    (geo, fields-minus-last) key are tracked and any overshoot past 100
    is subtracted from the offending row.
    """
    session = get_session()
    count = 0
    geo = None
    totals = defaultdict(float)
    try:
        for row in self.reader:
            count += 1
            row['geo_version'] = self.geo_version
            if row['total'] == 'no data':
                row['total'] = None
            else:
                row['total'] = round(float(row['total']), 1) \
                    if self.value_type == 'Float' \
                    else int(round(float(row['total'])))
            # Fix: idiomatic truthiness test instead of `== True`.
            if self.add_to_100:
                geo = row['geo_level'], row['geo_code']
                # Key on geo + all category fields except the last one.
                field_values = tuple(row[field] for field in self.fields[:-1])
                key = (geo + field_values)
                if row['total']:
                    totals[key] += row['total']
                    if totals[key] > 100:
                        # Clamp: remove the overshoot from this row.
                        diff = totals[key] - 100
                        row['total'] = row['total'] - diff
            if self.value_type == 'Float' and row['total']:
                row['total'] = str(row['total'])
            self.stdout.write("%s-%s" % (row['geo_level'], row['geo_code']))
            entry = self.table.model(**row)
            if not self.dryrun:
                session.add(entry)
                if count % 100 == 0:
                    session.flush()
        if not self.dryrun:
            session.commit()
    finally:
        # Bug fix: close the session even when a row fails; the original
        # leaked it on any exception during the load.
        session.close()
def get_profile(geo_code, geo_level, profile_name=None):
    """Build the profile dict for a geography, merge in parent-geo
    summaries, then tidy the distributions for display."""
    session = get_session()
    try:
        geo_summary_levels = geo_data.get_summary_geo_info(geo_code,
                                                           geo_level)
        data = {}
        sections = list(PROFILE_SECTIONS)
        # Crime stats are only available at these levels.
        if geo_level in ['country', 'province']:
            sections.append('crime')
        for section in sections:
            builder_name = 'get_%s_profile' % section
            if builder_name in globals():
                builder = globals()[builder_name]
                data[section] = builder(geo_code, geo_level, session)
                # Merge each parent geography's summary into this section.
                for level, code in geo_summary_levels:
                    merge_dicts(data[section],
                                builder(code, level, session), level)
    finally:
        session.close()

    # Tweaks to make the data nicer: show the N largest groups on their
    # own and lump the rest together as 'Other'.
    for section_key, chart_key, keep in (
            ('service_delivery', 'water_source_distribution', 5),
            ('service_delivery', 'refuse_disposal_distribution', 5),
            ('service_delivery', 'toilet_facilities_distribution', 5),
            ('demographics', 'language_distribution', 7),
            ('demographics', 'province_of_birth_distribution', 7),
            ('demographics', 'region_of_birth_distribution', 5),
            ('households', 'type_of_dwelling_distribution', 5),
            ('child_households', 'type_of_dwelling_distribution', 5)):
        group_remainder(data[section_key][chart_key], keep)

    data['elections'] = get_elections_profile(geo_code, geo_level)
    return data
def _build_model_from_fields(self, fields, db_table, value_type=Integer): ''' Generates an ORM model for arbitrary census fields by geography. :param list fields: the table fields e.g. ['highest educational level', 'type of sector'] :param str db_table: the name of the database table :param value_type: The value type of the total column. :return: ORM model class :rtype: Model ''' # does it already exist? model = get_model_for_db_table(db_table) if model: return model columns = self._build_model_columns(fields, value_type) # create the table model class Model(Base): __table__ = Table(db_table, Base.metadata, *columns, extend_existing=True) # ensure it exists in the DB session = get_session() try: Model.__table__.create(session.get_bind(), checkfirst=True) finally: session.close() DB_MODELS[db_table] = Model return Model # Now add the field columns columns.extend(Column(field, String(128), primary_key=True) for field in fields) # and the value column columns.append(Column('total', value_type, nullable=True))
def get_land_profile(geo, profile_name, request):
    """Build the land profile for *geo*, merging comparative geographies
    into each section that actually has data."""
    session = get_session()
    try:
        comparative_geos = geo_data.get_comparative_geos(geo)
        data = {}
        for section in list(PROFILE_SECTIONS):
            builder_name = 'get_%s_profile' % section
            if builder_name not in globals():
                continue
            builder = globals()[builder_name]
            data[section] = builder(geo, session)
            # Only merge comparatives when this geo has data at all.
            if not data[section]['is_missing']:
                for comp_geo in comparative_geos:
                    try:
                        merge_dicts(data[section],
                                    builder(comp_geo, session),
                                    comp_geo.geo_level)
                    except KeyError as e:
                        # Surface which geo/section pair failed to merge.
                        msg = "Error merging data into %s for section '%s' from %s: KeyError: %s" % (
                            geo.geoid, section, comp_geo.geoid, e)
                        log.fatal(msg, exc_info=e)
                        raise ValueError(msg)
        data['land_audit'] = get_land_audit_profile(geo, session)
        if geo.geo_level in ['district', 'municipality']:
            data['districtdistribution'] = \
                get_districtdistribution_profile(geo, session)
        return data
    finally:
        session.close()