def update_levels(country_code='LB'): """ Updates local admin level lookup tables from AI. These lookup tables are used when creating sites for AI. """ client = ActivityInfoClient() for level in client.get_admin_levels(country_code): entities = client.get_entities(level['id']) for entity in entities: ai[level['name']].update( {'_id': entity['id']}, entity, upsert=True) print 'Updated entity {}: {}'.format( level['name'], entity['name'].encode('UTF-8') ) for site_type in client.get_location_types(country_code): locations = client.get_locations(site_type['id']) for location in locations: ai.locations.update( {'_id': location['id']}, location, upsert=True) print 'Updated {}: {}'.format( site_type['name'].encode('UTF-8'), location['name'].encode('UTF-8') )
def update_ai_locations(type_id, username='', password=''): client = ActivityInfoClient(username, password) updated_location = 0 for location in ai.locations.find({'ai_name': {'$regex': 'PG'}}): payload = { 'id': int(random.getrandbits(31)), 'locationTypeId': type_id, 'name': location['ai_name'], 'axe': '{}'.format(location['p_code']), 'latitude': location['latitude'], 'longitude': location['longitude'], 'workflowstatusid': 'validated' } for id, level in location['adminEntities'].items(): payload['E{}'.format(id)] = level['id'] response = client.call_command('CreateLocation', **payload) if response.status_code == requests.codes.ok: updated_location += 1 print 'Uploaded {}'.format(location['ai_name'].encode('UTF-8')) else: print 'Error for: {}'.format(location['ai_name'].encode('UTF-8')) print updated_location
def update_sites(api_key='', domain='', username='', password='', list_name='', site_type='', name_col='', code_col='', target_list=''): carto_client = CartoDBAPIKey(api_key, domain) ai_client = ActivityInfoClient(username, password) # create an index of sites by p_code existing = dict( (site['code'], dict(site, index=i)) for (i, site) in enumerate(ai_client.get_locations(target_list)) if 'code' in site) sites = carto_client.sql('select * from {}'.format(list_name)) send_message('Starting upload of {}'.format(list_name)) bad_codes = [] updated_sites = 0 for row in sites['rows']: p_code = str(row[code_col]).strip() site_name = row[name_col].encode('UTF-8') cad = ai['Cadastral Area'].find_one({'code': str(row['cad_code'])}) if cad is None: bad_codes.append(row['cad_code']) continue caz = ai['Caza'].find_one({'id': cad['parentId']}) gov = ai['Governorate'].find_one({'id': caz['parentId']}) if p_code not in existing and site_name: payload = dict(id=int(random.getrandbits(31)), locationTypeId=int(target_list), name='{}: {}'.format(site_type, site_name)[0:40], axe='{}'.format(p_code), latitude=row['latitude'], longitude=row['longitude'], workflowstatusid='validated') payload['E{}'.format(gov['levelId'])] = gov['id'] payload['E{}'.format(caz['levelId'])] = caz['id'] payload['E{}'.format(cad['levelId'])] = cad['id'] response = ai_client.call_command('CreateLocation', **payload) if response.status_code == requests.codes.no_content: updated_sites += 1 print 'Updated {}'.format(payload['name']) else: print 'Error for {}'.format(payload['name']) print 'Bad codes: {}'.format(bad_codes) print 'Updated sites: {}'.format(updated_sites) send_message('Updated {} sites'.format(updated_sites))
def import_reports(self):
    """
    Pulls monthly partner reports from ActivityInfo and mirrors them
    locally as PartnerReport rows.

    Returns:
        int: the number of PartnerReport objects newly created.
    """
    client = ActivityInfoClient(self.username, self.password)
    reports = 0
    # Select all indicators that are included in Active PCAs,
    # and have linked indicators and a matching partner in AI
    for progress in IndicatorProgress.objects.filter(
            programmed__gt=0,
            pca_sector__pca__status__in=[PCA.ACTIVE, PCA.IMPLEMENTED],
            indicator__activity_info_indicators__isnull=False,
            pca_sector__pca__partner__activity_info_partner__isnull=False):
        # for each selected indicator get the related AI indicators (one-to-many)
        for ai_indicator in progress.indicator.activity_info_indicators.all():
            attributes = ai_indicator.activity.attributegroup_set.all()
            funded_by = attributes.get(name='Funded by')
            # query AI for matching site records for partner, activity, indicator
            sites = client.get_sites(
                partner=progress.pca.partner.activity_info_partner.ai_id
                if progress.pca.partner.activity_info_partner else None,
                activity=ai_indicator.activity.ai_id,
                indicator=ai_indicator.ai_id,
                # restrict to UNICEF-funded records via the attribute filter
                attribute=funded_by.attribute_set.get(name='UNICEF').ai_id,
            )
            # for those matching sites, create partner report instances
            for site in sites:
                for month, indicators in site['monthlyReports'].items():
                    for indicator in indicators:
                        # only non-empty values for this exact AI indicator
                        if indicator['indicatorId'] == ai_indicator.ai_id and indicator['value']:
                            report, created = PartnerReport.objects.get_or_create(
                                pca=progress.pca,
                                ai_partner=progress.pca.partner.activity_info_partner,
                                indicator=progress.indicator,
                                ai_indicator=ai_indicator,
                                location=site['location']['name'],
                                # AI months are 'YYYY-MM'; pin to mid-month
                                month=datetime.strptime(month + '-15', '%Y-%m-%d'),
                                indicator_value=indicator['value'])
                            if created:
                                reports += 1
    return reports
def import_reports(self):
    """
    Imports monthly partner reports from ActivityInfo into PartnerReport.

    Returns:
        int: count of newly created PartnerReport rows.
    """
    client = ActivityInfoClient(self.username, self.password)
    created_count = 0
    # Only indicators on active/implemented PCAs that are linked to AI
    # indicators and whose partner is mapped to an AI partner.
    eligible = IndicatorProgress.objects.filter(
        programmed__gt=0,
        pca_sector__pca__status__in=[PCA.ACTIVE, PCA.IMPLEMENTED],
        indicator__activity_info_indicators__isnull=False,
        pca_sector__pca__partner__activity_info_partner__isnull=False)
    for progress in eligible:
        ai_partner = progress.pca.partner.activity_info_partner
        # each local indicator can map to several AI indicators
        for ai_indicator in progress.indicator.activity_info_indicators.all():
            groups = ai_indicator.activity.attributegroup_set.all()
            funded_by = groups.get(name='Funded by')
            # ask AI for the sites matching partner/activity/indicator,
            # restricted to UNICEF-funded records
            sites = client.get_sites(
                partner=ai_partner.ai_id if ai_partner else None,
                activity=ai_indicator.activity.ai_id,
                indicator=ai_indicator.ai_id,
                attribute=funded_by.attribute_set.get(name='UNICEF').ai_id,
            )
            for site in sites:
                for month, indicators in site['monthlyReports'].items():
                    for entry in indicators:
                        if entry['indicatorId'] != ai_indicator.ai_id:
                            continue
                        if not entry['value']:
                            continue
                        report, was_created = PartnerReport.objects.get_or_create(
                            pca=progress.pca,
                            ai_partner=progress.pca.partner.activity_info_partner,
                            indicator=progress.indicator,
                            ai_indicator=ai_indicator,
                            location=site['location']['name'],
                            # 'YYYY-MM' from AI, pinned to the 15th
                            month=datetime.strptime(month + '-15', '%Y-%m-%d'),
                            indicator_value=entry['value']
                        )
                        if was_created:
                            created_count += 1
    return created_count
def update_ai_locations(type_id, username='', password=''): client = ActivityInfoClient(username, password) for location in ai.locations.find(): payload = { 'id': location['ai_id'], 'locationTypeId': type_id, 'name': location['ai_name'], 'axe': '{}: {}'.format('PCode', location['p_code']), 'latitude': location['latitude'], 'longitude': location['longitude'], 'workflowstatusid': 'validated' } for id, level in location['adminEntities'].items(): payload['E{}'.format(id)] = level['id'] response = client.call_command('CreateLocation', **payload) print response
def import_data(self):
    """
    Import all activities, indicators and partners from a ActivityInfo
    database specified by the AI ID.

    Updates this instance with the database metadata, then mirrors
    partners, activities, indicators, attribute groups and attributes
    into the local models.

    Returns:
        int: number of local objects newly created.

    Raises:
        Exception: if self.ai_id is not among the databases visible to
            the authenticated ActivityInfo user.
    """
    client = ActivityInfoClient(self.username, self.password)

    dbs = client.get_databases()
    db_ids = [db['id'] for db in dbs]
    if self.ai_id not in db_ids:
        raise Exception(
            'DB with ID {} not found in ActivityInfo'.format(
                self.ai_id
            ))

    db_info = client.get_database(self.ai_id)
    self.name = db_info['name']
    self.description = db_info['description']
    self.ai_country_id = db_info['country']['id']
    self.country_name = db_info['country']['name']
    self.save()

    objects = 0

    def get_or_new(model, ai_id):
        # fetch the local mirror row for an AI id, or return a fresh
        # unsaved instance; second element flags whether it is new
        try:
            return model.objects.get(ai_id=ai_id), False
        except model.DoesNotExist:
            return model(ai_id=ai_id), True

    # NOTE: the original wrapped everything below in
    # `try: ... except Exception as e: raise e`, which added nothing and
    # (in Python 2) truncated the traceback; removed.
    for partner in db_info['partners']:
        ai_partner, created = get_or_new(Partner, partner['id'])
        if created:
            objects += 1
        ai_partner.name = partner['name']
        ai_partner.full_name = partner['fullName']
        ai_partner.database = self
        ai_partner.save()

    for activity in db_info['activities']:
        ai_activity, created = get_or_new(Activity, activity['id'])
        if created:
            objects += 1
        ai_activity.name = activity['name']
        ai_activity.location_type = activity['locationType']['name']
        ai_activity.database = self
        ai_activity.save()

        for indicator in activity['indicators']:
            ai_indicator, created = get_or_new(Indicator, indicator['id'])
            if created:
                objects += 1
            ai_indicator.name = indicator['name']
            ai_indicator.units = indicator['units']
            ai_indicator.category = indicator['category']
            ai_indicator.activity = ai_activity
            ai_indicator.save()

        for attribute_group in activity['attributeGroups']:
            ai_attribute_group, created = get_or_new(
                AttributeGroup, attribute_group['id'])
            if created:
                objects += 1
            ai_attribute_group.name = attribute_group['name']
            ai_attribute_group.multiple_allowed = attribute_group['multipleAllowed']
            ai_attribute_group.mandatory = attribute_group['mandatory']
            ai_attribute_group.activity = ai_activity
            ai_attribute_group.save()

            for attribute in attribute_group['attributes']:
                ai_attribute, created = get_or_new(Attribute, attribute['id'])
                if created:
                    objects += 1
                ai_attribute.name = attribute['name']
                ai_attribute.attribute_group = ai_attribute_group
                ai_attribute.save()

    return objects
def update_sites(
        api_key='', domain='', username='', password='', list_name='',
        site_type='', name_col='', code_col='', target_list=''
):
    """
    Uploads sites from a CartoDB table into an ActivityInfo location type,
    skipping p-codes that already exist in the target list.

    :param api_key: CartoDB API key
    :param domain: CartoDB domain
    :param username: ActivityInfo account username
    :param password: ActivityInfo account password
    :param list_name: CartoDB table to read site rows from
    :param site_type: label prefixed onto each created site name
    :param name_col: column holding the site name
    :param code_col: column holding the site p-code
    :param target_list: ActivityInfo location type id to create sites under
    """
    carto_client = CartoDBAPIKey(api_key, domain)
    ai_client = ActivityInfoClient(username, password)
    # create an index of sites by p_code
    existing = dict(
        (site['code'], dict(site, index=i))
        for (i, site) in enumerate(
            ai_client.get_locations(target_list)
        )
        if 'code' in site
    )
    sites = carto_client.sql(
        'select * from {}'.format(list_name)
    )
    send_message('Starting upload of {}'.format(list_name))
    bad_codes = []
    updated_sites = 0
    for row in sites['rows']:
        p_code = str(row[code_col]).strip()
        site_name = row[name_col].encode('UTF-8')
        # resolve admin hierarchy: cadastral area -> caza -> governorate
        cad = ai['Cadastral Area'].find_one({'code': str(row['cad_code'])})
        if cad is None:
            # no cadastral match: remember the bad code and skip the row
            bad_codes.append(row['cad_code'])
            continue
        caz = ai['Caza'].find_one({'id': cad['parentId']})
        gov = ai['Governorate'].find_one({'id': caz['parentId']})
        if p_code not in existing and site_name:
            payload = dict(
                # random 31-bit id for the new AI location
                id=int(random.getrandbits(31)),
                locationTypeId=int(target_list),
                # name truncated to 40 chars — presumably an AI limit; TODO confirm
                name='{}: {}'.format(site_type, site_name)[0:40],
                axe='{}'.format(p_code),
                latitude=row['latitude'],
                longitude=row['longitude'],
                workflowstatusid='validated'
            )
            # one E<levelId> field per admin entity in the hierarchy
            payload['E{}'.format(gov['levelId'])] = gov['id']
            payload['E{}'.format(caz['levelId'])] = caz['id']
            payload['E{}'.format(cad['levelId'])] = cad['id']
            response = ai_client.call_command('CreateLocation', **payload)
            if response.status_code == requests.codes.no_content:
                updated_sites += 1
                print 'Updated {}'.format(payload['name'])
            else:
                print 'Error for {}'.format(payload['name'])
    print 'Bad codes: {}'.format(bad_codes)
    print 'Updated sites: {}'.format(updated_sites)
    send_message('Updated {} sites'.format(updated_sites))
def import_ai(dbs, username='', password='', date=''):
    """
    Imports data from Activity Info

    For each database id in the comma-separated ``dbs`` string: mirrors
    the database and its attribute groups into Mongo, pulls the report
    cube for one month, and stores each non-zero indicator value as a
    local Report row.

    :param dbs: comma-separated ActivityInfo database ids
    :param username: ActivityInfo account username
    :param password: ActivityInfo account password
    :param date: 'YYYY-MM' month to pull; defaults to the current month
    """
    db_ids = dbs.split(',')
    client = ActivityInfoClient(username, password)
    for db_id in db_ids:
        reports_created = 0
        db_info = client.get_database(db_id)
        send_message('AI import started for database: {}'.format(db_info['name']))
        # store the whole database for future reference
        db_info['_id'] = db_id
        ai.databases.update({'_id': db_id}, db_info, upsert=True)
        # split out all the attribute groups into a separate collection
        attribs = ai.databases.aggregate([
            {'$project': {'groups': '$activities.attributeGroups'}},
            # double $unwind: groups are nested per-activity lists
            {'$unwind': '$groups'},
            {'$unwind': '$groups'},
            {'$group': {'_id': "$_id", 'groups': {'$push': '$groups'}}},
        ])
        for attrib in attribs['result'][0]['groups']:
            attrib['_id'] = attrib['id']
            ai.attributeGroups.update({'_id': attrib['id']}, attrib, upsert=True)
        # create an index of sites by id
        sites = dict(
            (site['id'], dict(site, index=i))
            for (i, site) in enumerate(
                client.get_sites(database=db_id)
            )
        )
        # create an index of activities by id
        activities = dict(
            (activity['id'], dict(activity, index=i))
            for (i, activity) in enumerate(
                ai.databases.aggregate([
                    {'$match': {'_id': db_id}},
                    {'$unwind': '$activities'},
                    {'$project': {
                        '_id': 0,
                        'id': '$activities.id',
                        'name': '$activities.name',
                        'category': '$activities.category',
                        'location': '$activities.locationType'
                    }},
                ])['result']
            )
        )
        # get all reports for these activities
        if not date:
            # if no date provided get for the current month
            date = datetime.date.today().strftime('%Y-%m')
        send_message('Pulling reports for date: {}'.format(date))
        forms = client.get_cube(activities.keys(), month=date)
        for indicator in forms:
            site = sites[indicator['key']['Site']['id']]
            attributes = []
            if 'attributes' in site:
                # attribute groups containing any of this site's attributes;
                # "attributes.$" projects only the first matching attribute
                attributes = [
                    attr for attr in ai.attributeGroups.find(
                        {'attributes.id': {'$in': site['attributes']}},
                        {'name': 1, 'mandatory': 1, "attributes.$": 1}
                    )
                ]
            # only store non-zero indicator sums
            if indicator['sum']:
                report, created = Report.objects.get_or_create(
                    db_name=db_info['name'],
                    date='{}-{}'.format(
                        indicator['key']['Date']['year'],
                        indicator['key']['Date']['month'],
                    ),
                    site_id=site['id'],
                    activity_id=site['activity'],
                    partner_id=site['partner']['id'],
                    indicator_id=indicator['key']['Indicator']['id'],
                )
                activity = activities[report.activity_id]
                report.value = indicator['sum']
                report.category = activity['category']
                report.activity = activity['name']
                report.partner_name = site['partner']['name']
                report.p_code = site['location']['code']
                report.location_name = site['location']['name']
                report.location_id = site['location']['id']
                report.location_x = site['location'].get('longitude', None)
                report.location_y = site['location'].get('latitude', None)
                report.indicator_name = indicator['key']['Indicator']['label']
                report.comments = site.get('comments', None)
                location = ai.locations.find_one({'id': report.location_id})
                if location and 'adminEntities' in location:
                    # hard-coded admin level ids — presumably Lebanon's
                    # Governorate/District/Cadastral levels; TODO confirm
                    try:
                        report.gov_code = str(location['adminEntities']['1370']['id'])
                        report.governorate = location['adminEntities']['1370']['name']
                        report.district_code = str(location['adminEntities']['1521']['id'])
                        report.district = location['adminEntities']['1521']['name']
                        report.cadastral_code = str(location['adminEntities']['1522']['id'])
                        report.cadastral = location['adminEntities']['1522']['name']
                    except Exception as exp:
                        # best-effort: locations missing these levels are
                        # stored without admin names
                        pass
                if created:
                    for a in attributes:
                        report.attributes.append(
                            Attribute(
                                name=a['name'],
                                value=a['attributes'][0]['name']
                            )
                        )
                    reports_created += 1
                report.save()
        send_message('AI import finished, {} site reports created'.format(reports_created))
def import_ai(ai_db, username='', password=''): """ Imports data from Activity Info """ reports_created = 0 db_id = ai_db client = ActivityInfoClient(username, password) ai = MongoClient(os.environ.get( 'MONGO_URL', 'mongodb://localhost:27017'))[os.environ.get('MONGODB_DATABASE', 'ai')] # store the whole database for future reference print u'Pulling database...' db_info = client.get_database(db_id) sentry.captureMessage('AI import started for database: {}'.format( db_info['name']), level=logging.INFO) ai.databases.update({'_id': db_id}, db_info, upsert=True) # split out all the attribute groups into a separate collection attribs = ai.databases.aggregate([ { '$project': { 'groups': '$activities.attributeGroups' } }, { '$unwind': '$groups' }, { '$unwind': '$groups' }, { '$group': { '_id': "$_id", 'groups': { '$push': '$groups' } } }, ]) for attrib in attribs['result'][0]['groups']: ai.attributeGroups.update({'_id': attrib['id']}, attrib, upsert=True) for activity in ai.databases.find_one({'_id': db_id})['activities']: print u'Pulling sites for activity: {} - {}'.format( activity['id'], activity['name']) sites = client.get_sites(activity=activity['id'], include_monthly_reports=False) for site in sites: attributes = [ attr for attr in ai.attributeGroups.find( {'attributes.id': { '$in': site['attributes'] }}, { 'name': 1, 'mandatory': 1, "attributes.$": 1 }) ] print ' Pulling reports for site: {} - {}'.format( site['id'], site['location']['name'].encode('UTF-8')) reports = client.get_monthly_reports_for_site(site['id']) for date, indicators in reports.items(): for indicator in indicators: report, created = Report.objects.get_or_create( date=date, site_id=site['id'], activity_id=activity['id'], partner_id=site['partner']['id'], indicator_id=indicator['indicatorId'], ) report.value = indicator['value'] report.category = activity['category'] report.activity = activity['name'] report.partner_name = site['partner']['name'] report.location_name = site['location']['name'] 
report.location_id = site['location']['id'] report.location_x = site['location'].get('longitude', None) report.location_y = site['location'].get('latitude', None) report.indicator_name = indicator['indicatorName'] report.comments = site.get('comments', None) location = ai.locations.find_one( {'ai_id': report.location_id}) if location: report.p_code = location['p_code'] elif report.comments: matches = re.search(r'(\d{5}-\d?\d-\d{3})', report.comments) if matches: report.p_code = matches.group(1) if created: for a in attributes: report.attributes.append( Attribute(name=a['name'], value=a['attributes'][0]['name'])) print ' Created report: {} -> {} -> {} -> {} = {}'.format( report.date, report.location_name.encode('UTF-8'), report.partner_name.encode('UTF-8'), report.indicator_name.encode('UTF-8'), report.value) report.save() sentry.captureMessage( 'AI import finished, {} new reports created'.format(reports_created), level=logging.INFO)
def import_ai(dbs, username='', password='', date=''):
    """
    Imports data from Activity Info

    For each database id in the comma-separated ``dbs`` string: mirrors
    the database and its attribute groups into Mongo, pulls the report
    cube for one month, and stores each non-zero indicator value as a
    local Report row.

    :param dbs: comma-separated ActivityInfo database ids
    :param username: ActivityInfo account username
    :param password: ActivityInfo account password
    :param date: 'YYYY-MM' month to pull; defaults to the current month
    """
    db_ids = dbs.split(',')
    client = ActivityInfoClient(username, password)
    for db_id in db_ids:
        reports_created = 0
        db_info = client.get_database(db_id)
        send_message('AI import started for database: {}'.format(
            db_info['name']))
        # store the whole database for future reference
        db_info['_id'] = db_id
        ai.databases.update({'_id': db_id}, db_info, upsert=True)
        # split out all the attribute groups into a separate collection
        attribs = ai.databases.aggregate([
            {'$project': {'groups': '$activities.attributeGroups'}},
            # double $unwind: groups are nested per-activity lists
            {'$unwind': '$groups'},
            {'$unwind': '$groups'},
            {'$group': {'_id': "$_id", 'groups': {'$push': '$groups'}}},
        ])
        for attrib in attribs['result'][0]['groups']:
            attrib['_id'] = attrib['id']
            ai.attributeGroups.update({'_id': attrib['id']},
                                      attrib,
                                      upsert=True)
        # create an index of sites by id
        sites = dict(
            (site['id'], dict(site, index=i))
            for (i, site) in enumerate(client.get_sites(database=db_id)))
        # create an index of activities by id
        activities = dict(
            (activity['id'], dict(activity, index=i))
            for (i, activity) in enumerate(
                ai.databases.aggregate([
                    {'$match': {'_id': db_id}},
                    {'$unwind': '$activities'},
                    {'$project': {
                        '_id': 0,
                        'id': '$activities.id',
                        'name': '$activities.name',
                        'category': '$activities.category',
                        'location': '$activities.locationType'
                    }},
                ])['result']))
        # get all reports for these activities
        if not date:
            # if no date provided get for the current month
            date = datetime.date.today().strftime('%Y-%m')
        send_message('Pulling reports for date: {}'.format(date))
        forms = client.get_cube(activities.keys(), month=date)
        for indicator in forms:
            site = sites[indicator['key']['Site']['id']]
            attributes = []
            if 'attributes' in site:
                # attribute groups containing any of this site's attributes;
                # "attributes.$" projects only the first matching attribute
                attributes = [
                    attr for attr in ai.attributeGroups.find(
                        {'attributes.id': {'$in': site['attributes']}},
                        {'name': 1, 'mandatory': 1, "attributes.$": 1})
                ]
            # only store non-zero indicator sums
            if indicator['sum']:
                report, created = Report.objects.get_or_create(
                    db_name=db_info['name'],
                    date='{}-{}'.format(
                        indicator['key']['Date']['year'],
                        indicator['key']['Date']['month'],
                    ),
                    site_id=site['id'],
                    activity_id=site['activity'],
                    partner_id=site['partner']['id'],
                    indicator_id=indicator['key']['Indicator']['id'],
                )
                activity = activities[report.activity_id]
                report.value = indicator['sum']
                report.category = activity['category']
                report.activity = activity['name']
                report.partner_name = site['partner']['name']
                report.p_code = site['location']['code']
                report.location_name = site['location']['name']
                report.location_id = site['location']['id']
                report.location_x = site['location'].get('longitude', None)
                report.location_y = site['location'].get('latitude', None)
                report.indicator_name = indicator['key']['Indicator']['label']
                report.comments = site.get('comments', None)
                location = ai.locations.find_one({'id': report.location_id})
                if location and 'adminEntities' in location:
                    # hard-coded admin level ids — presumably Lebanon's
                    # Governorate/District/Cadastral levels; TODO confirm
                    try:
                        report.gov_code = str(
                            location['adminEntities']['1370']['id'])
                        report.governorate = location['adminEntities']['1370'][
                            'name']
                        report.district_code = str(
                            location['adminEntities']['1521']['id'])
                        report.district = location['adminEntities']['1521'][
                            'name']
                        report.cadastral_code = str(
                            location['adminEntities']['1522']['id'])
                        report.cadastral = location['adminEntities']['1522'][
                            'name']
                    except Exception as exp:
                        # best-effort: locations missing these levels are
                        # stored without admin names
                        pass
                if created:
                    for a in attributes:
                        report.attributes.append(
                            Attribute(name=a['name'],
                                      value=a['attributes'][0]['name']))
                    reports_created += 1
                report.save()
        send_message('AI import finished, {} site reports created'.format(
            reports_created))
def import_ai(ai_db, username='', password=''): """ Imports data from Activity Info """ reports_created = 0 db_id = ai_db client = ActivityInfoClient(username, password) ai = MongoClient( os.environ.get('MONGO_URL', 'mongodb://localhost:27017'))[ os.environ.get('MONGODB_DATABASE', 'ai')] # store the whole database for future reference print u'Pulling database...' db_info = client.get_database(db_id) sentry.captureMessage('AI import started for database: {}'.format(db_info['name']), level=logging.INFO) ai.databases.update({'_id': db_id}, db_info, upsert=True) # split out all the attribute groups into a separate collection attribs = ai.databases.aggregate([ {'$project': {'groups': '$activities.attributeGroups'}}, {'$unwind': '$groups'}, {'$unwind': '$groups'}, {'$group': {'_id': "$_id", 'groups': {'$push': '$groups'}}}, ]) for attrib in attribs['result'][0]['groups']: ai.attributeGroups.update({'_id': attrib['id']}, attrib, upsert=True) for activity in ai.databases.find_one({'_id': db_id})['activities']: print u'Pulling sites for activity: {} - {}'.format(activity['id'], activity['name']) sites = client.get_sites(activity=activity['id'], include_monthly_reports=False) for site in sites: attributes = [attr for attr in ai.attributeGroups.find( {'attributes.id': {'$in': site['attributes']}}, {'name': 1, 'mandatory': 1, "attributes.$": 1} )] print ' Pulling reports for site: {} - {}'.format( site['id'], site['location']['name'].encode('UTF-8') ) reports = client.get_monthly_reports_for_site(site['id']) for date, indicators in reports.items(): for indicator in indicators: report, created = Report.objects.get_or_create( date=date, site_id=site['id'], activity_id=activity['id'], partner_id=site['partner']['id'], indicator_id=indicator['indicatorId'], ) report.value = indicator['value'] report.category = activity['category'] report.activity = activity['name'] report.partner_name = site['partner']['name'] report.location_name = site['location']['name'] report.location_id = 
site['location']['id'] report.location_x = site['location'].get('longitude', None) report.location_y = site['location'].get('latitude', None) report.indicator_name = indicator['indicatorName'] report.comments = site.get('comments', None) location = ai.locations.find_one({'ai_id': report.location_id}) if location: report.p_code = location['p_code'] elif report.comments: matches = re.search(r'(\d{5}-\d?\d-\d{3})', report.comments) if matches: report.p_code = matches.group(1) if created: for a in attributes: report.attributes.append( Attribute( name=a['name'], value=a['attributes'][0]['name'] ) ) print ' Created report: {} -> {} -> {} -> {} = {}'.format( report.date, report.location_name.encode('UTF-8'), report.partner_name.encode('UTF-8'), report.indicator_name.encode('UTF-8'), report.value ) report.save() sentry.captureMessage('AI import finished, {} new reports created'.format(reports_created), level=logging.INFO)