Example #1
    def list(self, request, datasource_id, format="json"):

        sources = DataSource.objects.all()
        profile = request.user.iaso_profile
        data_source = sources.filter(projects__account=profile.account,
                                     id=datasource_id).first()

        if data_source is None:
            return Response({"error": "Data source not available"},
                            status=status.HTTP_404_NOT_FOUND)

        if data_source.credentials is None:
            return Response({"error": "No credentials configured"},
                            status=status.HTTP_401_UNAUTHORIZED)

        credentials = data_source.credentials

        t1_start = process_time()
        api = Api(credentials.url, credentials.login, credentials.password)
        params = {
            "fields": request.GET.get("fields", "id,displayName"),
            "pageSize": request.GET.get("pageSize", 50),
            "filter": request.GET.get("filter", None),
        }
        resp = api.get(self.resource, params=params).json()
        t1_stop = process_time()

        if "pager" in resp:
            if "nextPage" in resp["pager"]:
                del resp["pager"]["nextPage"]

        resp["stats"] = {"elapsedTimeMs": (t1_stop - t1_start) * 1000}
        return Response(resp)
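
Note that process_time() measures CPU time only, so the elapsedTimeMs stat above excludes the time the request spends waiting on the DHIS2 server. A minimal sketch of the same measurement using wall-clock perf_counter() instead:

from time import perf_counter

t1_start = perf_counter()
resp = api.get(self.resource, params=params).json()
resp["stats"] = {"elapsedTimeMs": (perf_counter() - t1_start) * 1000}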
Example #2
def create_api(server=None, username=None, password=None, api_version=None):
    """Return a fully configured dhis2.Dhis instance"""
    if not any([server, username, password]):
        api = Api.from_auth_file(api_version=api_version,
                                 user_agent='dhis2-pk/{}'.format(__version__))
        logger.info("Found a file for server {}".format(api.base_url))
        return api
    else:
        return Api(server, username, password, api_version,
                   'dhis2-pk/{}'.format(__version__))
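
A short usage sketch for create_api (the credentials below are the public play.dhis2.org demo ones; api_version is optional):

# Explicit credentials: builds the Api directly
api = create_api(server="play.dhis2.org/dev",
                 username="admin",
                 password="district",
                 api_version=33)

# No arguments: falls back to Api.from_auth_file(), which needs a dish.json
# (or similar auth file) to be discoverable
api = create_api()
print(api.base_url)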
Example #3
def sync_data_to_dhis2():
    data_value_sets_to_delete = []
    data_value_sets = DataValueSet.objects.filter(
        updated_at__lte=datetime.now(tz=timezone.utc) + timedelta(hours=1))

    for dvs in data_value_sets:
        payload = {}
        api = Api(dvs.user.instance.url, dvs.user.instance.username,
                  dvs.user.instance.password)

        payload['dataSet'] = dvs.data_set.dataset_id
        if dvs.mark_as_complete:
            payload['completeDate'] = json.dumps(dvs.created_at,
                                                 sort_keys=True,
                                                 indent=1,
                                                 cls=DjangoJSONEncoder)
        payload['period'] = dvs.period
        payload['orgUnit'] = dvs.org_unit.org_unit_id
        payload['dataValues'] = []
        for dv in dvs.datavalue_set.all():
            p = {
                "dataElement": dv.data_element.data_element_id,
                "categoryOptionCombo":
                dv.category_option_combo.category_option_combo_id,
                "value": dv.value,
                "comment": ""
            }
            payload['dataValues'].append(p)

        try:
            response = api.post('dataValueSets',
                                json=payload,
                                params={
                                    "dataSet": dvs.data_set.dataset_id,
                                    "orgUnit": dvs.org_unit.org_unit_id,
                                    "period": dvs.period
                                })
            if response.status_code == 200:
                data_value_sets_to_delete.append(dvs.pk)
        except RequestException as ex:
            logger.error(ex)

    logger.info("Syncing data complete")

    for dvs_id in data_value_sets_to_delete:
        dvs = DataValueSet.objects.get_or_none(pk=dvs_id)
        if dvs is not None:
            dvs.delete()
    if len(data_value_sets_to_delete) > 0:
        logger.info("Removing data value sets complete")
Example #4
    def get_api(self, options):
        from dhis2 import Api

        api = Api(options.get("dhis2_url"), options.get("dhis2_user"),
                  options.get("dhis2_password"))

        return api
Example #5
def sync_dhis2_metadata():
    invalidate_users_cache()
    invalidate_org_units_cache()
    invalidate_dataset_cache()

    logger.info("Starting to sync metadata")

    dhis2_instances = Instance.objects.all()
    for dhis2 in dhis2_instances:
        api = Api(dhis2.url, dhis2.username, dhis2.password)
        version = uuid.uuid4()

        logger.info("Downloading metadata from {} with version {}.".format(
            dhis2.url, version))

        sync_org_units(api, dhis2, version)
        sync_users(api, dhis2, version)
        sync_category_combos(api, dhis2, version)
        sync_data_elements(api, dhis2, version)
        sync_data_sets(api, dhis2, version)
        sync_sections(api, dhis2, version)

    logger.info("Syncing metadata ............ Done")

    org_units_to_cache = cache_users_with_their_assigned_org_units()
    cache_org_units_with_their_datasets(org_units_to_cache)
    cache_datasets_with_their_data_elements()
Example #6
def assign_orgunits_to_program(self, credentials):
    api = Api(credentials.url, credentials.login, credentials.password)
    program_id = "eBAyeGv0exc"
    orgunits = api.get("organisationUnits", params={"fields": "id", "paging": "false"}).json()["organisationUnits"]
    program = api.get("programs/" + program_id, params={"fields": ":all"}).json()
    program["organisationUnits"] = orgunits
    api.put("programs/" + program_id, program)
Example #7
def make_category_options_public(self, credentials):
    api = Api(credentials.url, credentials.login, credentials.password)
    for page in api.get_paged("categoryOptions", params={"fields": ":all"}, page_size=100):
        for category_option in page["categoryOptions"]:
            if category_option["name"] != "default":
                try:
                    api.post(
                        "sharing?type=categoryOption&id=" + category_option["id"],
                        {
                            "meta": {"allowPublicAccess": True, "allowExternalAccess": False},
                            "object": {
                                "id": category_option["id"],
                                "name": category_option["name"],
                                "displayName": category_option["displayName"],
                                "publicAccess": "rwrw----",
                                "user": category_option["user"],
                                "externalAccess": False,
                            },
                        },
                    )
                except Exception as e:
                    print("Failed to fix ", category_option["name"], e)
Example #8
class Dhis2Extractor(object):
    """This class fetches DHIS2 data using the dhis2 library and outputs it in various formats."""

    FORMAT_CSV = "csv"

    def __init__(self, url, *, username, password):
        # Let's re-use an existing library instead of maintaining our own
        self._api = Api(url, username, password)

    def extract_organisation_units(self,
                                   *,
                                   fields=":all",
                                   output_format,
                                   output_path):
        """
        Extract organisation units.

        Methods should always expose parameters subject to changes, such as field names and output options.

        :param fields:
        :param output_format:
        :param output_path:
        :return:
        """

        try:
            response_data = self._api.get_paged("organisationUnits",
                                                params={"fields": fields},
                                                merge=True)
        except RequestException as e:  # Always handle exceptions
            raise Dhis2ExtractorException(
                # Prefer f-strings to concatenation / string interpolation
                f"An error occurred while fetching DHIS2 data (URL: {e.url}, status code: {e.code})"
            )

        organisation_units = response_data["organisationUnits"]
        dataframe = pd.DataFrame.from_dict(organisation_units)

        return self._dump(dataframe,
                          output_format=output_format,
                          output_path=output_path)

    def _dump(self, dataframe, *, output_format, output_path):
        if output_format == self.FORMAT_CSV:
            return dataframe.to_csv(output_path)

        raise NotImplementedError(f'Unknown output format "{output_format}"')
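
A hedged usage sketch for Dhis2Extractor; the URL, credentials and output path below are placeholders:

extractor = Dhis2Extractor("https://play.dhis2.org/dev",
                           username="admin", password="district")
# Fetches all pages of organisationUnits merged into one response and
# writes them to a CSV file
extractor.extract_organisation_units(fields="id,name,level",
                                     output_format=Dhis2Extractor.FORMAT_CSV,
                                     output_path="organisation_units.csv")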
Example #9
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server,
              username=args.username,
              password=args.password)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [
        p['id']
        for p in api.get('programs', params=params_get).json()['programs']
    ]
    for p in programs_csv:
        if p not in programs_server:
            logger.error(
                u"Program {0} is not a valid program: {1}/programs/{0}.json".
                format(p, api.api_url))

    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid),
                          params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list,
                                       args.append_orgunits)
        metadata_payload.append(updated)

        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)

        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list),
                  len(program['organisationUnits']), program['name']))

        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, data=final)
Example #10
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)
    p = {
        'paging': False,
        'filter': 'name:like:HNQIS',
        'fields': 'id,name'
    }
    programs = api.get('programs', params=p).json()
    print("event_date,program,name,event,_OverallScore,0CS-100,diff")
    fix_them = []

    csparams = {
        'filter': ['shortName:like:.0CS-100', 'name:!ilike:_DEL'],
        'paging': False,
        'fields': 'id'
    }
    root_compscores = [x['id'] for x in api.get('dataElements', params=csparams).json()['dataElements']]

    for p in programs['programs']:
        params = {
            'program': p['id'],
            'skipPaging': True,
            'fields': '[*]'
        }
        events = api.get('events', params=params).json()
        for event in events['events']:
            if analyze_event(p, event, root_compscores):
                fix_them.append(event)

    if fix_them and args.fix_values:
        logger.info(u"Fixing those events and resetting _Order Forward...")
        for i, e in enumerate(fix_them, 1):
            fixed = fix_event(e, root_compscores)
            logger.info(u"[{}/{}] Pushing event {}...".format(i, len(fix_them), e['event']))
            api.put('events/{}'.format(e['event']), data=fixed)
    else:
        logger.warn(u"Not fixing events")
Example #11
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)

    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(args.attribute_uid, args.object_type[:-1]))

    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warn("\033[1m{}\033[0m".format("Aborted!"))
        pass

    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid), params=params_get).json()
        obj_updated = create_or_update_attributevalues(obj=obj_old, attribute_uid=args.attribute_uid,
                                                       attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid), params=None, data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(i, len(data), attribute_value,
                                                                                        args.object_type[:-1], obj_uid))
Example #12
import pandas as pd
from tools.json import reindex, json_extract, json_extract_nested_ids
import gspread
import logzero
from oauth2client.service_account import ServiceAccountCredentials
from gspread_dataframe import get_as_dataframe, set_with_dataframe
from gspread_formatting import *
from dhis2 import Api
import re


try:
    f = open("./auth.json")
except IOError:
    print("Please provide file auth.json with credentials for DHIS2 server")
    exit(1)
else:
    api_source = Api.from_auth_file('./auth.json')

# If no file path is specified, it tries to find a file called dish.json in:
#
# the DHIS_HOME environment variable
# your Home folder
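# For reference, the auth file layout expected here (the same "dhis" block
# that other scripts on this page read manually via credentials['dhis'];
# baseurl/username/password below are placeholders):
#
# {
#     "dhis": {
#         "baseurl": "https://play.dhis2.org/dev",
#         "username": "admin",
#         "password": "district"
#     }
# }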


# setup the logger
log_file = "./dummyDataTracker.log"
logzero.logfile(log_file)


def add_repeatable_stages(df, stage_counter):
    if df['Stage'].isna().sum() > 0:
        stage_indexes = df.index[df['Stage'].notnull()].tolist()
Example #13
from dict2obj import Dict2Obj
import datetime
import requests
import re
from concurrent.futures.thread import ThreadPoolExecutor
import json
import hashlib

from dhis2 import Api

# import the logging library
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
# Get DHIS2 credentials from the config
dhis2 = Dict2Obj(GeneralConfiguration.get_dhis2())
# create the DHIS2 API object
api = Api(dhis2.host, dhis2.username, dhis2.password)
# define the page size
page_size = int(GeneralConfiguration.get_default_page_size())

path = GeneralConfiguration.get_json_out_path()


def printPaginated(ressource, queryset, convertor, **kwargs):
    local_page_size = kwargs.get('page_size', page_size)
    p = Paginator(queryset, local_page_size)
    pages = p.num_pages
    curPage = 1
    timestamp = datetime.datetime.now().strftime("%d%m%Y%H%M%S.%f")
    while curPage <= pages:
        f = open(
            path + '\out_' + timestamp + '_' + ressource + '-' + str(curPage) +
Example #14
def api():
    return Api(BASEURL, "admin", "district")
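
This (like Examples #19, #21 and #24) reads like a pytest fixture with its decorator stripped; a minimal sketch of how it would typically be wired up, assuming BASEURL points at a disposable test instance:

import pytest
from dhis2 import Api

BASEURL = "https://play.dhis2.org/dev"  # assumed test instance

@pytest.fixture
def api():
    return Api(BASEURL, "admin", "district")

def test_version(api):
    assert api.version is not None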
Example #15
from dhis2 import Api, RequestException, setup_logger, logger
"""
Add "(updated)" to all Data Elements that contain "ANC" in its name.
Uses the method PUT.
Print errors if it failed.
"""

# Create an Api object
api = Api('play.dhis2.org/dev', 'admin', 'district')

# setup the logger
setup_logger()


def main():
    # Print DHIS2 Info
    logger.warn("You are running on DHIS2 version {} revision {} - "
                "Last Analytics generation was at: {}".format(
                    api.version, api.revision,
                    api.info.get('lastAnalyticsTableSuccess')))

    # GET dataElements that contain ANC in its name
    params = {'filter': 'name:like:ANC', 'paging': False, 'fields': ':owner'}
    data_elements = api.get('dataElements', params=params).json()

    # Loop through each dataElement
    for de in data_elements['dataElements']:
        # Add (updated) to the name
        de['name'] = '{} (updated)'.format(de['name'])

        try:
Example #16
def main():

    my_parser = argparse.ArgumentParser(description='dashboard_checker')
    my_parser.add_argument('-i',
                           '--instance',
                           action="store",
                           dest="instance",
                           type=str,
                           help='URL of the instance to process')
    my_parser.add_argument(
        '-df',
        '--dashboard_filter',
        action="store",
        dest="dashboard_filter",
        type=str,
        help='Either a prefix or a list of comma separated UIDs')
    my_parser.add_argument('--no_data_warning',
                           dest='no_data_warning',
                           action='store_true')
    my_parser.add_argument('--omit-no_data_warning',
                           dest='no_data_warning',
                           action='store_false')
    my_parser.add_argument('-v',
                           '--verbose',
                           dest='verbose',
                           action='store_true')
    my_parser.set_defaults(no_data_warning=True)
    my_parser.set_defaults(verbose=False)
    args = my_parser.parse_args()

    if args.instance is not None:
        instances = [{
            'name': args.instance.split('/')[-1].replace(':', '_'),
            'url': args.instance
        }]
    else:
        instances = [
            #{'name':'newdemos', 'url':'https://who-demos.dhis2.org/newdemos', 'SQL_view_TRK':'xfemQFHUTUV', 'SQL_view_AGG':'lg8lFbDMw2Z'}
            #{'name':'tracker_dev', 'url': 'https://who-dev.dhis2.org/tracker_dev', 'SQL_view_TRK': 'xfemQFHUTUV', 'SQL_view_AGG': 'lg8lFbDMw2Z'}
            {
                'name': 'covid-19',
                'url': 'https://demos.dhis2.org/covid-19',
                'SQL_view_TRK': 'xfemQFHUTUV',
                'SQL_view_AGG': 'lg8lFbDMw2Z'
            }
        ]

    log_file = "./dashboard_checker.log"
    setup_logger(log_file)

    credentials_file = './auth.json'

    df = pd.DataFrame({},
                      columns=[
                          'dashboard_name', 'type', 'uid', 'name', 'issue',
                          'api_link', 'app_link'
                      ])

    errors_found = 0

    for instance in instances:
        try:
            f = open(credentials_file)
        except IOError:
            print(
                "Please provide file auth.json with credentials for DHIS2 server"
            )
            exit(1)
        else:
            with open(credentials_file, 'r') as json_file:
                credentials = json.load(json_file)
            api_source = Api(instance['url'], credentials['dhis']['username'],
                             credentials['dhis']['password'])

        # Get dashboards
        params = {"fields": "*", "paging": "false"}
        if args.dashboard_filter is not None:
            item_list = args.dashboard_filter.split(',')
            if len(item_list) == 1 and not is_valid_uid(item_list[0]):
                params["filter"] = "name:$like:" + args.dashboard_filter
            else:
                # Let's consider it as a list of uids
                # Validate the list
                for item in item_list:
                    if not is_valid_uid(item):
                        logger.error("UID " + item +
                                     " is not a valid DHIS2 UID")
                        exit(1)
                params["filter"] = "id:in:[" + args.dashboard_filter + "]"

        dashboards = api_source.get('dashboards',
                                    params=params).json()['dashboards']

        dashboard_item_with_issues_row = dict()

        for dashboard in dashboards:
            logger.info('Processing dashboard ' + dashboard['name'])
            dashboard_item_with_issues_row['dashboard_name'] = dashboard[
                'name']
            if '2.33' not in api_source.version:
                dashboard_items = [
                    'visualization', 'eventReport', 'eventChart', 'map'
                ]
            else:
                dashboard_items = [
                    'chart', 'reportTable', 'eventReport', 'eventChart', 'map'
                ]
            for dashboardItem in dashboard['dashboardItems']:
                # The dashboard item could be of type TEXT, for example
                # in this case there is nothing to do
                dashboard_item_type_found = False
                for dashboard_item in dashboard_items:
                    if dashboard_item in dashboardItem:
                        dashboard_item_type_found = True
                        dashboard_item_with_issues_row['issue'] = ""
                        dashboard_item_with_issues_row['type'] = dashboard_item
                        dashboard_item_with_issues_row['uid'] = dashboardItem[
                            dashboard_item]['id']
                        dashboard_item_with_issues_row['name'] = ""
                        if args.verbose:
                            logger.info('Trying ' + dashboard_item + ' ' +
                                        dashboardItem[dashboard_item]['id'])
                        try:
                            api_endpoint = dashboard_item + 's/' + dashboardItem[
                                dashboard_item]['id']
                            dashboard_item_with_issues_row[
                                'api_link'] = instance[
                                    'url'] + '/api/' + api_endpoint
                            item = api_source.get(api_endpoint,
                                                  params={
                                                      "fields": "*"
                                                  }).json()
                        except RequestException as e:
                            logger.error(dashboard_item + ' ' +
                                         dashboardItem[dashboard_item]['id'] +
                                         " BROKEN with error " + str(e))
                            dashboard_item_with_issues_row['issue'] = str(e)
                            errors_found += 1
                        else:
                            dashboard_item_with_issues_row['name'] = item[
                                'name']
                            if dashboard_item in ['eventReport', 'eventChart']:
                                continue
                            # Try to get the data
                            try:
                                if dashboard_item == 'map':
                                    for map_view in item['mapViews']:
                                        params = build_analytics_payload(
                                            map_view, args.verbose)
                                        if params != {}:
                                            if 'layer' in map_view and map_view[
                                                    'layer'] == 'event' and 'program' in map_view:
                                                data = api_source.get(
                                                    'analytics/events/query/' +
                                                    map_view['program']['id'],
                                                    params=params).json()
                                            else:
                                                data = api_source.get(
                                                    'analytics',
                                                    params=params).json()
                                else:
                                    data = api_source.get(
                                        'analytics',
                                        params=build_analytics_payload(
                                            item, args.verbose)).json()
                            except RequestException as e:
                                logger.error(
                                    dashboard_item + ' ' +
                                    dashboardItem[dashboard_item]['id'] +
                                    " data cannot be retrieved with error " +
                                    str(e))
                                dashboard_item_with_issues_row['issue'] = str(
                                    e)
                                errors_found += 1
                            else:
                                # print(data['rows'])
                                if args.no_data_warning and (
                                        'rows' not in data
                                        or len(data['rows']) == 0):
                                    dashboard_item_with_issues_row[
                                        'issue'] = 'NO DATA'
                                    logger.warning(
                                        dashboardItem[dashboard_item]['id'] +
                                        ': NO DATA!!!')

                            #exit(0)

                if dashboard_item_type_found and dashboard_item_with_issues_row[
                        'issue'] != "":
                    if dashboard_item_with_issues_row[
                            'type'] == 'visualization':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-data-visualizer/index.html#/' + \
                                                                     dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'map':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-maps/index.html'
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventReport':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-event-reports/index.html?id=' + \
                                                                     dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventChart':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-event-visualizer/index.html?id=' + \
                                                                     dashboard_item_with_issues_row['uid']
                    df = df.append(dashboard_item_with_issues_row,
                                   ignore_index=True)

    export_csv = df.to_csv(instance['name'] + '.csv', index=None, header=True)

    # Release log handlers
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)

    return errors_found
Example #17
    instance = sys.argv[4]
else:
    instance = None

credentials_file = 'auth.json'

try:
    f = open(credentials_file)
except IOError:
    print("Please provide file auth.json with credentials for DHIS2 server")
    exit(1)
else:
    with open(credentials_file, 'r') as json_file:
        credentials = json.load(json_file)
    if instance is not None:
        api = Api(instance, credentials['dhis']['username'],
                  credentials['dhis']['password'])
    else:
        api = Api.from_auth_file(credentials_file)

de_uid = generate_uid()

dummy_data_de = {
    "id": de_uid,
    "name": "Dummy data placeholder",
    "shortName": "Dummy data placeholder",
    "aggregationType": "NONE",
    "domainType": "AGGREGATE",
    "publicAccess": "--------",
    "externalAccess": False,
    "valueType": "NUMBER",
    "zeroIsSignificant": False,
Example #18
    def get_api(self, mapping_version):

        if not mapping_version.id in self.api_cache:
            credentials = mapping_version.mapping.data_source.credentials
            self.api_cache[mapping_version.id] = Api(credentials.url, credentials.login, credentials.password)
        return self.api_cache[mapping_version.id]
Example #19
def api():
    return Api(BASEURL, 'admin', 'district')
Example #20
import re
from os import path

import Levenshtein as lev
import pandas as pd
from bs4 import BeautifulSoup
from dhis2 import Api, RequestException, setup_logger, logger, generate_uid, \
    is_valid_uid  # make sure you have dhis2.py installed, otherwise run "pip3 install dhis2.py"

try:
    f = open("./auth.json")
except IOError:
    print("Please provide file auth.json with credentials for DHIS2 server")
    exit(1)
else:
    api = Api.from_auth_file('./auth.json')

#          "dhis2.util.on( 'dhis2.de.event.formReady', function( event, ds ) {\n" \
#           "} );\n" \
js_code = "" \
          "console.log('Applying translations');\n" \
          "    $(function() {\n" \
          "        	$.ajax({\n" \
          "        	   type: 'GET',\n" \
          "        	   url: '../api/me.json',\n" \
          "        	   success: function(data){\n" \
          "        	     if('settings' in data) {\n" \
          "                 var locale = data.settings.keyDbLocale;\n" \
          "                 console.log('DB Locale: ' + locale);\n" \
          "        	     }\n" \
          "        	     else {\n" \
Example #21
def api_with_api_version():
    return Api(BASEURL, "admin", "district", api_version=29)
Example #22
def get_api(self, options):
    return Api(options.get("dhis2_url"), options.get("dhis2_user"),
               options.get("dhis2_password"))
Example #23
class Dhis2Client:
    def __init__(self, *, url, username, password, verbose=False):
        self._api = Api(url, username, password)
        self.name = url
        self.verbose = verbose

    def fetch_info(self):
        info = self._api.get_info()
        self.name = info["systemName"]
        return info

    def fetch_data_elements(self):
        results = []
        for page in self._api.get_paged("dataElements",
                                        params={"fields": ":all"},
                                        page_size=100):
            if self.verbose:
                logger.info(
                    "sync_log %s: page from data_elements %s",
                    self.name,
                    page.get("pager"),
                )
            results.extend(
                [DataElementResult(data) for data in page["dataElements"]])
        return results

    def fetch_datasets(self):
        results = []
        for page in self._api.get_paged("dataSets",
                                        params={"fields": ":all"},
                                        page_size=100):
            if self.verbose:
                logger.info("sync_log %s: page from datasets %s", self.name,
                            page.get("pager"))
            results.extend([DataSetResult(data) for data in page["dataSets"]])
        return results

    def fetch_indicator_types(self):
        results = []
        for page in self._api.get_paged("indicatorTypes",
                                        params={"fields": ":all"},
                                        page_size=100):
            if self.verbose:
                logger.info(
                    "sync_log %s: page from indicator_types %s",
                    self.name,
                    page.get("pager"),
                )
            results.extend(
                [IndicatorTypeResult(data) for data in page["indicatorTypes"]])
        return results

    def fetch_indicators(self):
        results = []
        for page in self._api.get_paged("indicators",
                                        params={"fields": ":all"},
                                        page_size=100):
            if self.verbose:
                logger.info("sync_log %s: page from indicators %s", self.name,
                            page.get("pager"))
            results.extend(
                [IndicatorResult(data) for data in page["indicators"]])
        return results

    def fetch_organisation_units(self):
        results = []
        for page in self._api.get_paged("organisationUnits",
                                        params={"fields": ":all"},
                                        page_size=100):
            if self.verbose:
                logger.info(
                    "sync_log %s: page from organisation_units %s",
                    self.name,
                    page.get("pager"),
                )
            # rewrite path -> replace "/" by "." for correct ltree path
            # warning: in place edit, can side effect on tests
            for element in page["organisationUnits"]:
                if "path" in element:
                    element["path"] = element["path"].replace("/",
                                                              ".").strip(".")

            results.extend([
                OrganisationUnitResult(data)
                for data in page["organisationUnits"]
            ])
        return results
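
A short usage sketch for Dhis2Client; the URL and credentials are the public demo ones:

client = Dhis2Client(url="https://play.dhis2.org/dev",
                     username="admin", password="district", verbose=True)
info = client.fetch_info()  # also sets client.name to the instance's systemName
data_elements = client.fetch_data_elements()
org_units = client.fetch_organisation_units()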
Example #24
def api_with_api_version():
    return Api(BASEURL, 'admin', 'district', api_version=30)
Example #25
def __init__(self, *, url, username, password, verbose=False):
    self._api = Api(url, username, password)
    self.name = url
    self.verbose = verbose
Example #26
from dhis2 import Api, RequestException, setup_logger, logger, load_json, import_response_ok

"""
Import a metadata JSON file from your computer.
"""

# Create an Api object
api = Api("play.dhis2.org/dev", "admin", "district")

# setup the logger
setup_logger(include_caller=False)


def main():
    # load the JSON file that sits next to the script
    data = load_json("2_import_metadata.json")

    try:
        # import metadata
        r = api.post(
            "metadata.json",
            params={"preheatCache": False, "strategy": "CREATE"},
            json=data,
            timeout=(3, 30)
        )
    except RequestException as e:
        logger.error("Import failed: {}".format(e))
    else:
        if import_response_ok(r.json()):
            logger.info("Import successful!")
        else:
Example #27
def main():
    import argparse
    global api_source

    my_parser = argparse.ArgumentParser(
        prog='dummy_data_agg',
        description='Create dummy data for aggregated datasets',
        epilog="example1"
        "\nexample2",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    my_parser.add_argument(
        'Dataset',
        metavar='dataset_param',
        type=str,
        help='the uid of the dataset to use or a string to filter datasets')
    my_parser.add_argument(
        '-sd',
        '--start_date',
        action="store",
        dest="start_date",
        type=str,
        help=
        'start date for the period to use to generate data (default is today - 1 year)'
    )
    my_parser.add_argument(
        '-ptf',
        '--period_type_filter',
        action="store",
        dest="period_type_filter",
        type=str,
        help='only applicable when having multiple datasets: d, w, m, y')
    my_parser.add_argument(
        '-ed',
        '--end_date',
        action="store",
        dest="end_date",
        type=str,
        help=
        'end date for the period to use to generate data (default is today)')
    my_parser.add_argument(
        '-ous',
        '--org_unit_selection',
        action="store",
        metavar=('type', 'value'),
        nargs=2,
        help=
        'Provide a type of org unit selection from [uid,uid_children,name,code,level] and the value to use'
        'Eg: --ous uid QXtjg5dh34A')
    # Parameters should be 0 or 1
    my_parser.add_argument('-cf',
                           '--create_flat_file',
                           action="store",
                           metavar='file_name',
                           const='xxx',
                           nargs='?',
                           help='Create spreadsheet for min/max values'
                           'Eg: --create_flat_file=my_file.csv')
    my_parser.add_argument('-uf',
                           '--use_flat_file',
                           action="store",
                           metavar='file_name',
                           nargs=1,
                           help='Use spreadsheet for min/max values'
                           'Eg: --use_flat_file=my_file.csv')
    my_parser.add_argument(
        '-i',
        '--instance',
        action="store",
        dest="instance",
        type=str,
        help=
        'instance to use for dummy data injection (robot account is required!) - default is the URL in auth.json'
    )
    my_parser.add_argument(
        '-ours',
        '--ous_random_size',
        action="store",
        dest="ous_random_size",
        type=str,
        help=
        'From all OUs selected from ous command, takes a random sample of ous_random_size'
    )

    args = my_parser.parse_args()

    credentials_file = 'auth.json'

    try:
        f = open(credentials_file)
    except IOError:
        print(
            "Please provide file auth.json with credentials for DHIS2 server")
        exit(1)
    else:
        with open(credentials_file, 'r') as json_file:
            credentials = json.load(json_file)
        if args.instance is not None:
            api_source = Api(args.instance, credentials['dhis']['username'],
                             credentials['dhis']['password'])
        else:
            api_source = Api.from_auth_file(credentials_file)

    logger.warning("Server source running DHIS2 version {} revision {}".format(
        api_source.version, api_source.revision))

    #WHAT
    dsParam = args.Dataset
    # WHERE
    ouUIDs = list()
    #WHEN
    start_date = ""
    end_date = ""
    periods = list()

    # Assign values from parameters provided if applicable
    if args.create_flat_file is None:  # If we are creating a flat file it does not matter if not provided
        if args.org_unit_selection is None:
            print(
                'Please provide a value for org_unit_selection to create the dummy data'
            )
        else:
            if len(args.org_unit_selection) >= 1:
                ouUIDs = get_org_units(
                    args.org_unit_selection[0], args.org_unit_selection[1],
                    int(args.ous_random_size) if args.ous_random_size else None)
                if len(ouUIDs) == 0:
                    print('The OU selection ' + args.org_unit_selection[0] +
                          ' ' + args.org_unit_selection[1] +
                          ' returned no result')
                    exit(1)

        if args.start_date is None:
            start_date = (date.today() -
                          timedelta(days=365)).strftime("%Y-%m-%d")
        else:
            start_date = args.start_date
            if not isDateFormat(start_date):
                print('Start date provided ' + start_date +
                      ' has a wrong format')
                exit(1)
        if args.end_date is None:
            end_date = (date.today()).strftime("%Y-%m-%d")
        else:
            end_date = args.end_date
            if not isDateFormat(end_date):
                print('End date provided ' + end_date + ' has a wrong format')
                exit(1)

    periods = list()

    if args.create_flat_file is not None:
        df_min_max = pd.DataFrame({},
                                  columns=[
                                      'DE UID', 'COC UID', 'DE Name',
                                      'COC Name', 'valueType', 'min', 'max'
                                  ])
    else:
        df_min_max = None

    if args.use_flat_file is not None:
        filename = args.use_flat_file
        logger.info("Reading " + filename + " for min/max value")
        df_min_max = pd.read_csv(filename, sep=None, engine='python')

    CC = api_source.get('categoryCombos',
                        params={
                            "paging": "false",
                            "fields": "id,name,categoryOptionCombos"
                        }).json()['categoryCombos']
    CC = reindex(CC, 'id')
    defaultCC = ''
    for catcomboUID in CC:
        if CC[catcomboUID]['name'] == 'default':
            defaultCC = catcomboUID
            break
    if defaultCC == '':
        logger.warning('Could not find default Category Combo')

    COC = api_source.get('categoryOptionCombos',
                         params={
                             "paging": "false",
                             "fields": "id,name"
                         }).json()['categoryOptionCombos']
    COC = reindex(COC, 'id')

    DE = api_source.get(
        'dataElements',
        params={
            "paging": "false",
            "fields":
            "id,name,categoryCombo,aggregationType,valueType,optionSet"
        }).json()['dataElements']
    DE = reindex(DE, 'id')

    # Check for optionSets in the DE
    optionSetUIDs = list()
    for de_uid in DE:
        # DE has been reindexed by UID, so look values up by key rather than
        # testing the UID string itself
        if 'optionSet' in DE[de_uid]:
            optionSetUIDs.append(DE[de_uid]['optionSet']['id'])
    if len(optionSetUIDs) > 0:
        options = api_source.get('options',
                                 params={
                                     "paging": "false",
                                     "fields": "id,name,code",
                                     "filter": "optionSet.id:in:[" +
                                               ','.join(optionSetUIDs) + "]"
                                 }).json()['options']

    de_numeric_types = [
        'INTEGER_POSITIVE', 'INTEGER', 'INTEGER_ZERO_OR_POSITIVE', 'NUMBER',
        'PERCENTAGE', 'INTEGER_ZERO_OR_NEGATIVE'
    ]

    # Get the datasets"
    if is_valid_uid(dsParam):
        dataset_filter = "id:eq:" + dsParam
    else:
        dataset_filter = "name:like:" + dsParam

    dataSets = api_source.get(
        'dataSets',
        params={
            "paging": "false",
            "fields": "id,name,dataSetElements,periodType,"
            "formType,dataEntryForm,sections,organisationUnits",
            "filter": dataset_filter
        }).json()['dataSets']
    # Only one dataSet
    if len(dataSets) == 0:
        logger.error("Could not find any dataset")
        exit(1)
    else:
        if len(dataSets) > 1 and args.period_type_filter is not None:
            periodTypeFilter = args.period_type_filter
            if periodTypeFilter.lower() not in [
                    'daily', 'weekly', 'monthly', 'quarterly', 'yearly'
            ]:
                logger.error('Period type to filter not supported:' +
                             periodTypeFilter)
            else:
                filteredDatasets = list()
                for ds in dataSets:
                    if ds['periodType'].lower() == periodTypeFilter.lower():
                        filteredDatasets.append(ds)
                dataSets = filteredDatasets

        # Create workbook
        if args.create_flat_file is not None:
            output_file_name = args.create_flat_file + '.xlsx'
            writer = pd.ExcelWriter(output_file_name)
        for ds in dataSets:
            logger.info("Processing dataset " + ds['name'])
            if start_date != "" and end_date != "":
                logger.info("Period type is " + ds['periodType'] +
                            " - Generating periods from " + start_date +
                            " to " + end_date)
                periods = get_periods(ds['periodType'], start_date, end_date)
            if len(ouUIDs) > 0:
                logger.info("Verifying org unit selection")
                # Iterate over a copy: items are removed from ouUIDs below
                for ou_uid in list(ouUIDs):
                    if not is_ou_assigned_to_ds(ou_uid, ds):
                        ouUIDs.remove(ou_uid)
                        logger.warning("Org unit " + ou_uid +
                                       " is not assigned to dataset " +
                                       ds['id'])

            dsDataElements = dict()
            greyedFields = list()

            # Analyse the sections of the dataSet looking for greyedFields
            if 'sections' in ds:
                sectionUIDs = ""
                for section in ds['sections']:
                    sectionUIDs += (section['id'] + ",")
                logger.info("Found " + str(sectionUIDs.count(',')) +
                            " sections in dataset")
                # Get sections
                sections = api_source.get(
                    'sections',
                    params={
                        "paging": "false",
                        "fields":
                        "id,name,greyedFields[dataElement,categoryOptionCombo]",
                        "filter": "id:in:[" + sectionUIDs + "]"
                    }).json()['sections']
                for section in sections:
                    if len(section['greyedFields']) > 0:
                        for element in section['greyedFields']:
                            greyedFields.append(
                                element['dataElement']['id'] + '.' +
                                element['categoryOptionCombo']['id'])

            # Get dataElements
            for DSE in ds['dataSetElements']:
                # Re-initialise the min/max frame only when building a flat
                # file; an unconditional reset would clobber the frame loaded
                # with --use_flat_file (or the None set when neither flag is
                # given)
                if args.create_flat_file is not None:
                    df_min_max = pd.DataFrame({},
                                              columns=[
                                                  'DE UID', 'COC UID',
                                                  'DE Name', 'COC Name',
                                                  'valueType', 'min', 'max'
                                              ])
                de = ''
                if 'dataElement' in DSE:
                    deUID = DSE['dataElement']['id']
                    dsDataElements[deUID] = dict()
                    de = DE[deUID]  # Get all dataElement information
                    dsDataElements[deUID]['valueType'] = de['valueType']

                    # Add options to the dataelement dict if pertinent
                    if 'optionSet' in de:
                        options = api_source.get('options',
                                                 params={
                                                     "paging":
                                                     "false",
                                                     "fields":
                                                     "id,name,code",
                                                     "filter":
                                                     "optionSet.id:eq:" +
                                                     de['optionSet']['id']
                                                 }).json()['options']
                        dsDataElements[deUID]['options'] = list()
                        for option in options:
                            dsDataElements[deUID]['options'].append(
                                option['code'])

                    # Check if the Category Combo is specified in the dataElement definition
                    COCs = list()
                    if 'categoryCombo' in de and de['categoryCombo'][
                            'id'] != defaultCC:
                        COCs = CC[de['categoryCombo']
                                  ['id']]['categoryOptionCombos']

                    # Check if Category Combo is specified for the dataElement in the dataSet
                    elif 'categoryCombo' in DSE and DSE['categoryCombo'][
                            'id'] != defaultCC:
                        COCs = CC[DSE['categoryCombo']
                                  ['id']]['categoryOptionCombos']

                    # Add COCs to the dataElement dictionary
                    if len(COCs) > 0:
                        dsDataElements[deUID]['COCs'] = list()
                        for coc in COCs:
                            dsDataElements[deUID]['COCs'].append(coc['id'])

            logger.info("Found " + str(len(dsDataElements)) +
                        " dataElements in dataset")

            if args.create_flat_file is not None:
                for de in dsDataElements:
                    if 'COCs' in dsDataElements[de]:
                        for coc in dsDataElements[de]['COCs']:
                            str_pair = de + "." + coc
                            if str_pair not in greyedFields:
                                df_min_max = df_min_max.append(
                                    {
                                        "DE UID":
                                        de,
                                        "COC UID":
                                        coc,
                                        "DE Name":
                                        DE[de]['name'],
                                        "COC Name":
                                        COC[coc]['name'],
                                        "valueType":
                                        dsDataElements[de]['valueType'],
                                        "min":
                                        "",
                                        "max":
                                        ""
                                    },
                                    ignore_index=True)
                    else:
                        df_min_max = df_min_max.append(
                            {
                                "DE UID": de,
                                "COC UID": "",
                                "DE Name": DE[de]['name'],
                                "COC Name": "",
                                "valueType": dsDataElements[de]['valueType'],
                                "min": "",
                                "max": ""
                            },
                            ignore_index=True)

                # Save csv file
                # export_csv = df_min_max.to_csv(r'./ds_' + ds['name'].replace(' ', '_') + '_min_max.csv', index=None,
                #                               header=True)
                df_min_max.to_excel(writer, ds['id'], index=False)

            else:
                dataValueSets = list()
                ouCount = 1
                for ouUID in ouUIDs:
                    logger.info("Processing org unit " + ouUID + " - " +
                                str(ouCount) + "/" + str(len(ouUIDs)))
                    for period in periods:
                        #logger.info("Processing period " + period)
                        for de in dsDataElements:
                            value_type = dsDataElements[de]['valueType']
                            min_value = max_value = None
                            options = None
                            if 'options' in dsDataElements[de]:
                                options = dsDataElements[de]['options']
                            if 'COCs' in dsDataElements[de]:
                                for coc in dsDataElements[de]['COCs']:
                                    str_pair = de + "." + coc
                                    if str_pair not in greyedFields:
                                        if df_min_max is not None:
                                            min_value, max_value = get_min_max_from_df(
                                                df_min_max, value_type, de,
                                                coc)
                                        # logger.info(
                                        #     "Generating value for DE (" + value_type + "): " + DE[de]['name'] + " with COC")
                                        value = generate_dummy_value({
                                            'value_type':
                                            value_type,
                                            'min_value':
                                            min_value,
                                            'max_value':
                                            max_value,
                                            'options':
                                            options
                                        })
                                        if value is not None:  # Skip if it is None
                                            dataValueSets.append({
                                                "dataElement":
                                                de,
                                                "categoryOptionCombo":
                                                coc,
                                                "value":
                                                value,
                                                "orgUnit":
                                                ouUID,
                                                "period":
                                                period
                                            })
                                    # else:
                                    #     logger.warning('Skipping ' + str_pair + ' because is greyed in section')
                            else:
                                if df_min_max is not None:
                                    min_value, max_value = get_min_max_from_df(
                                        df_min_max, value_type, de)
                                # logger.info("Generating value for DE (" + value_type + "): " + DE[de]['name'])
                                value = generate_dummy_value({
                                    'value_type': value_type,
                                    'min_value': min_value,
                                    'max_value': max_value,
                                    'options': options
                                })
                                if value is not None:  # Skip if it is None
                                    dataValueSets.append({
                                        "dataElement": de,
                                        "value": value,
                                        "orgUnit": ouUID,
                                        "period": period
                                    })

                    post_to_server({'dataValues': dataValueSets},
                                   'dataValueSets')
                    dataValueSets = list()
                    ouCount += 1

        if args.create_flat_file is not None:
            writer.save()
Example #28
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)

    if '.psi-mis.org' not in args.server and '.hnqis.org' not in args.server:
        logger.warn("This script is intended only for *.psi-mis.org or *.hnqis.org")
        sys.exit(0)

    indicators = {}
    backup_indicators = []
    container = []

    for ha in HEALTH_AREAS:

        # VMMC splits do not have their own HA
        if ha == 'VMMC':
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {}'.format(ha),
                    'name:like$:count',
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        else:
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {} count'.format(ha),
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        data1 = api.get('programIndicators', params=p1).json()
        pi_uids = [p['id'] for p in data1['programIndicators']]

        p2 = {
            'paging': False,
            'filter': ['name:eq:HNQIS - {} count'.format(ha)],
            'fields': ':owner'
        }
        data2 = api.get('indicators', params=p2).json()
        backup_indicators.append(data2['indicators'])

        if ha == 'VMMC':
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        else:
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),  # 2.30 would need to change filters
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        data3 = api.get('programs', params=p3).json()
        no_of_programs = len(data3['programs'])

        if no_of_programs != len(pi_uids):
            print(u"\033[1mWarning\033[1m\033[0m - number of {} programs ({}) "
                  u"does not match number of 'count' programIndicators ({})!".format(ha, no_of_programs, len(pi_uids)))
            print("\n".join([x['name'] for x in data3['programs']]))

        if len(data2['indicators']) == 1:
            i = data2['indicators'][0]
            i['numerator'] = create_numerator(pi_uids)
            container.append(i)
            print(u'  \033[1m{}\033[0m - Added {} programIndicators to numerator of indicator "{}"'.format(ha, len(pi_uids), i['name']))

        elif len(data2['indicators']) > 1:
            print(u"\033[1mMore than one indicator found for health area {}\033[0m".format(ha))
        elif len(pi_uids) != 0:
            print(u"\033[1mNo indicator found for health area {}\033[0m".format(ha))

    dump_to_file(backup_indicators)
    indicators['indicators'] = container

    print(u"Posting updated programindicators to \033[1m{}\033[0m...".format(args.server))
    time.sleep(3)
    
    api.post('metadata', params={'importMode': 'COMMIT', 'preheatCache': False}, data=indicators)
Example #29
def __init__(self, url, *, username, password):
    # Let's re-use an existing library instead of maintaining our own
    self._api = Api(url, username, password)
Example #30
import json

from dhis2 import Api, RequestException, logger

credentials_file = './auth.json'
instance = None
instance = 'https://who-demos.dhis2.org/covid-19'

try:
    f = open(credentials_file)
except IOError:
    print("Please provide file auth.json with credentials for DHIS2 server")
    exit(1)
else:
    with open(credentials_file, 'r') as json_file:
        credentials = json.load(json_file)
    if instance is not None:
        api = Api(instance, credentials['dhis']['username'],
                  credentials['dhis']['password'])
    else:
        api = Api.from_auth_file(credentials_file)

for tei_uid in [
        'mtOF630AvuW', 'murTrn4YinD', 'SShni7GDoRE', 'f06rwiTeJwc',
        'ZTue9sD5ONy', 'lrg63P8mOV2', 'k1eOcQe8CnQ', 'N8kyBO5164B',
        'raXg7iD6jYT'
]:
    try:
        response = api.delete('trackedEntityInstances/' + tei_uid)
    except RequestException as e:
        logger.error(e)
        pass
    else:
        logger.info("TEI " + tei_uid + " removed")