Example #1
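# Assigns every organisation unit on the server to a hard-coded program
# (UID eBAyeGv0exc) by fetching the program with all fields, replacing its
# organisationUnits list and PUTting it back.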
def assign_orgunits_to_program(self, credentials):
    api = Api(credentials.url, credentials.login, credentials.password)
    program_id = "eBAyeGv0exc"
    orgunits = api.get("organisationUnits", params={"fields": "id", "paging": "false"}).json()["organisationUnits"]
    program = api.get("programs/" + program_id, params={"fields": ":all"}).json()
    program["organisationUnits"] = orgunits
    api.put("programs/" + program_id, program)
Example #2
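    # Django REST Framework-style action: proxies a DHIS2 resource through the
    # credentials stored on a DataSource the requesting user may access, and
    # attaches the elapsed request time to the response.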
    def list(self, request, datasource_id, format="json"):

        sources = DataSource.objects.all()
        profile = request.user.iaso_profile
        data_source = sources.filter(projects__account=profile.account,
                                     id=datasource_id).first()

        if data_source is None:
            return Response({"error": "Data source not available"},
                            status=status.HTTP_404_NOT_FOUND)

        if data_source.credentials is None:
            return Response({"error": "No credentials configured"},
                            status=status.HTTP_401_UNAUTHORIZED)

        credentials = data_source.credentials

        t1_start = process_time()
        api = Api(credentials.url, credentials.login, credentials.password)
        params = {
            "fields": request.GET.get("fields", "id,displayName"),
            "pageSize": request.GET.get("pageSize", 50),
            "filter": request.GET.get("filter", None),
        }
        resp = api.get(self.resource, params=params).json()
        t1_stop = process_time()

        if "pager" in resp:
            if "nextPage" in resp["pager"]:
                del resp["pager"]["nextPage"]

        resp["stats"] = {"elapsedTimeMs": (t1_stop - t1_start) * 1000}
        return Response(resp)
Example #3
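# Reads a program/orgunit mapping from a CSV, warns about program UIDs missing
# on the server, writes a JSON backup of each program, then posts the updated
# orgunit assignments as metadata.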
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server,
              username=args.username,
              password=args.password)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [
        p['id']
        for p in api.get('programs', params=params_get).json()['programs']
    ]
    for p in programs_csv:
        if p not in programs_server:
            logger.error(
                u"Program {0} is not a valid program: "
                u"{1}/programs/{0}.json".format(p, api.api_url))

    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid),
                          params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list,
                                       args.append_orgunits)
        metadata_payload.append(updated)

        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)

        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list),
                  len(program['organisationUnits']), program['name']))

        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, data=final)
Example #4
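# Scans HNQIS programs for events whose _OverallScore disagrees with the
# 0CS-100 composite score data elements; when args.fix_values is set, the
# corrected events are pushed back one by one.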
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)
    p = {
        'paging': False,
        'filter': 'name:like:HNQIS',
        'fields': 'id,name'
    }
    programs = api.get('programs', params=p).json()
    print("event_date,program,name,event,_OverallScore,0CS-100,diff")
    fix_them = []

    csparams = {
        'filter': ['shortName:like:.0CS-100', 'name:!ilike:_DEL'],
        'paging': False,
        'fields': 'id'
    }
    root_compscores = [x['id'] for x in api.get('dataElements', params=csparams).json()['dataElements']]

    for p in programs['programs']:
        params = {
            'program': p['id'],
            'skipPaging': True,
            'fields': '[*]'
        }
        events = api.get('events', params=params).json()
        for event in events['events']:
            if analyze_event(p, event, root_compscores):
                fix_them.append(event)

    if fix_them and args.fix_values:
        logger.info(u"Fixing those events and resetting _Order Forward...")
        for i, e in enumerate(fix_them, 1):
            fixed = fix_event(e, root_compscores)
            logger.info(u"[{}/{}] Pushing event {}...".format(i, len(fix_them), e['event']))
            api.put('events/{}'.format(e['event']), data=fixed)
    else:
        logger.warning(u"Not fixing events")
Example #5
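# Bulk-creates or updates one attribute value per object listed in a CSV
# (column 'key' holds the object UID, column 'value' the attribute value).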
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)

    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(args.attribute_uid, args.object_type[:-1]))

    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warning("\033[1mAborted!\033[0m")
        raise

    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid), params=params_get).json()
        obj_updated = create_or_update_attributevalues(obj=obj_old, attribute_uid=args.attribute_uid,
                                                       attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid), params=None, data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(i, len(data), attribute_value,
                                                                                        args.object_type[:-1], obj_uid))
Example #6
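# Generates dummy aggregate data values for the selected dataset(s) and posts
# them as dataValueSets, or writes a min/max flat file (spreadsheet) that a
# later run can use to bound the generated values.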
def main():
    import argparse
    global api_source

    my_parser = argparse.ArgumentParser(
        prog='dummy_data_agg',
        description='Create dummy data for aggregated datasets',
        epilog="example1"
        "\nexample2",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    my_parser.add_argument(
        'Dataset',
        metavar='dataset_param',
        type=str,
        help='the uid of the dataset to use or a string to filter datasets')
    my_parser.add_argument(
        '-sd',
        '--start_date',
        action="store",
        dest="start_date",
        type=str,
        help=
        'start date for the period to use to generate data (default is today - 1 year)'
    )
    my_parser.add_argument(
        '-ptf',
        '--period_type_filter',
        action="store",
        dest="period_type_filter",
        type=str,
        help='only applicable when having multiple datasets: d, w, m, y')
    my_parser.add_argument(
        '-ed',
        '--end_date',
        action="store",
        dest="end_date",
        type=str,
        help=
        'end date for the period to use to generate data (default is today)')
    my_parser.add_argument(
        '-ous',
        '--org_unit_selection',
        action="store",
        metavar=('type', 'value'),
        nargs=2,
        help='Provide a type of org unit selection from '
        '[uid,uid_children,name,code,level] and the value to use. '
        'Eg: --ous uid QXtjg5dh34A')
    # Parameters should be 0 or 1
    my_parser.add_argument('-cf',
                           '--create_flat_file',
                           action="store",
                           metavar='file_name',
                           const='xxx',
                           nargs='?',
                           help='Create spreadsheet for min/max values. '
                           'Eg: --create_flat_file=my_file.csv')
    my_parser.add_argument('-uf',
                           '--use_flat_file',
                           action="store",
                           metavar='file_name',
                           nargs=1,
                           help='Use spreadsheet for min/max values. '
                           'Eg: --use_flat_file=my_file.csv')
    my_parser.add_argument(
        '-i',
        '--instance',
        action="store",
        dest="instance",
        type=str,
        help=
        'instance to use for dummy data injection (robot account is required!) - default is the URL in auth.json'
    )
    my_parser.add_argument(
        '-ours',
        '--ous_random_size',
        action="store",
        dest="ous_random_size",
        type=str,
        help=
        'From all OUs selected from ous command, takes a random sample of ous_random_size'
    )

    args = my_parser.parse_args()

    credentials_file = 'auth.json'

    try:
        with open(credentials_file) as json_file:
            credentials = json.load(json_file)
    except IOError:
        print("Please provide file auth.json with credentials for DHIS2 server")
        exit(1)

    if args.instance is not None:
        api_source = Api(args.instance, credentials['dhis']['username'],
                         credentials['dhis']['password'])
    else:
        api_source = Api.from_auth_file(credentials_file)

    logger.warning("Server source running DHIS2 version {} revision {}".format(
        api_source.version, api_source.revision))

    # WHAT
    dsParam = args.Dataset
    # WHERE
    ouUIDs = list()
    # WHEN
    start_date = ""
    end_date = ""
    periods = list()

    # Assign values from parameters provided if applicable
    if args.create_flat_file is None:  # If we are creating a flat file it does not matter if not provided
        if args.org_unit_selection is None:
            print('Please provide a value for org_unit_selection to create the dummy data')
        else:
            # nargs=2 guarantees both the type and the value are present;
            # --ous_random_size is optional, and int(None) would raise a TypeError
            # (assumes get_org_units treats None as "no random sampling")
            sample_size = int(args.ous_random_size) if args.ous_random_size else None
            ouUIDs = get_org_units(args.org_unit_selection[0],
                                   args.org_unit_selection[1], sample_size)
            if len(ouUIDs) == 0:
                print('The OU selection ' + args.org_unit_selection[0] + ' ' +
                      args.org_unit_selection[1] + ' returned no result')
                exit(1)

        if args.start_date is None:
            start_date = (date.today() -
                          timedelta(days=365)).strftime("%Y-%m-%d")
        else:
            start_date = args.start_date
            if not isDateFormat(start_date):
                print('Start date provided ' + start_date +
                      ' has a wrong format')
                exit(1)
        if args.end_date is None:
            end_date = (date.today()).strftime("%Y-%m-%d")
        else:
            end_date = args.end_date
            if not isDateFormat(end_date):
                print('End date provided ' + end_date + ' has a wrong format')
                exit(1)


    if args.create_flat_file is not None:
        df_min_max = pd.DataFrame({},
                                  columns=[
                                      'DE UID', 'COC UID', 'DE Name',
                                      'COC Name', 'valueType', 'min', 'max'
                                  ])
    else:
        df_min_max = None

    if args.use_flat_file is not None:
        filename = args.use_flat_file
        logger.info("Reading " + filename + " for min/max value")
        df_min_max = pd.read_csv(filename, sep=None, engine='python')

    CC = api_source.get('categoryCombos',
                        params={
                            "paging": "false",
                            "fields": "id,name,categoryOptionCombos"
                        }).json()['categoryCombos']
    CC = reindex(CC, 'id')
    defaultCC = ''
    for catcomboUID in CC:
        if CC[catcomboUID]['name'] == 'default':
            defaultCC = catcomboUID
            break
    if defaultCC == '':
        logger.warning('Could not find default Category Combo')

    COC = api_source.get('categoryOptionCombos',
                         params={
                             "paging": "false",
                             "fields": "id,name"
                         }).json()['categoryOptionCombos']
    COC = reindex(COC, 'id')

    DE = api_source.get(
        'dataElements',
        params={
            "paging": "false",
            "fields":
            "id,name,categoryCombo,aggregationType,valueType,optionSet"
        }).json()['dataElements']
    DE = reindex(DE, 'id')

    # Check for optionSets in the DEs (DE is a dict keyed by UID after reindex)
    optionSetUIDs = list()
    for de in DE.values():
        if 'optionSet' in de:
            optionSetUIDs.append(de['optionSet']['id'])
    if len(optionSetUIDs) > 0:
        options = api_source.get(
            'options',
            params={
                "paging": "false",
                "fields": "id,name,code",
                # several optionSets need an in:[...] filter rather than eq:
                "filter": "optionSet.id:in:[" + ','.join(optionSetUIDs) + "]"
            }).json()['options']

    de_numeric_types = [
        'INTEGER_POSITIVE', 'INTEGER', 'INTEGER_ZERO_OR_POSITIVE', 'NUMBER',
        'PERCENTAGE', 'INTEGER_ZERO_OR_NEGATIVE'
    ]

    # Get the datasets"
    if is_valid_uid(dsParam):
        dataset_filter = "id:eq:" + dsParam
    else:
        dataset_filter = "name:like:" + dsParam

    dataSets = api_source.get(
        'dataSets',
        params={
            "paging": "false",
            "fields": "id,name,dataSetElements,periodType,"
            "formType,dataEntryForm,sections,organisationUnits",
            "filter": dataset_filter
        }).json()['dataSets']
    # Only one dataSet
    if len(dataSets) == 0:
        logger.error("Could not find any dataset")
        exit(1)
    else:
        if len(dataSets) > 1 and args.period_type_filter is not None:
            periodTypeFilter = args.period_type_filter
            if periodTypeFilter.lower() not in [
                    'daily', 'weekly', 'monthly', 'quarterly', 'yearly'
            ]:
                logger.error('Period type to filter not supported:' +
                             periodTypeFilter)
            else:
                filteredDatasets = list()
                for ds in dataSets:
                    if ds['periodType'].lower() == periodTypeFilter.lower():
                        filteredDatasets.append(ds)
                dataSets = filteredDatasets

        # Create workbook
        if args.create_flat_file is not None:
            output_file_name = args.create_flat_file + '.xlsx'
            writer = pd.ExcelWriter(output_file_name)
        for ds in dataSets:
            logger.info("Processing dataset " + ds['name'])
            if start_date != "" and end_date != "":
                logger.info("Period type is " + ds['periodType'] +
                            " - Generating periods from " + start_date +
                            " to " + end_date)
                periods = get_periods(ds['periodType'], start_date, end_date)
            if len(ouUIDs) > 0:
                logger.info("Verifying org unit selection")
                # iterate over a copy: removing items while iterating a list skips elements
                for ou_uid in list(ouUIDs):
                    if not is_ou_assigned_to_ds(ou_uid, ds):
                        ouUIDs.remove(ou_uid)
                        logger.warning("Org unit " + ou_uid +
                                       " is not assigned to dataset " + ds['id'])

            dsDataElements = dict()
            greyedFields = list()

            # Analyse the sections of the dataSet looking for greyedFields
            if 'sections' in ds:
                sectionUIDs = ",".join(section['id'] for section in ds['sections'])
                logger.info("Found " + str(len(ds['sections'])) +
                            " sections in dataset")
                # Get sections
                sections = api_source.get(
                    'sections',
                    params={
                        "paging": "false",
                        "fields":
                        "id,name,greyedFields[dataElement,categoryOptionCombo]",
                        "filter": "id:in:[" + sectionUIDs + "]"
                    }).json()['sections']
                for section in sections:
                    if len(section['greyedFields']) > 0:
                        for element in section['greyedFields']:
                            greyedFields.append(
                                element['dataElement']['id'] + '.' +
                                element['categoryOptionCombo']['id'])

            # Get dataElements
            for DSE in ds['dataSetElements']:
                de = ''
                if 'dataElement' in DSE:
                    deUID = DSE['dataElement']['id']
                    dsDataElements[deUID] = dict()
                    de = DE[deUID]  # Get all dataElement information
                    dsDataElements[deUID]['valueType'] = de['valueType']

                    # Add options to the dataelement dict if pertinent
                    if 'optionSet' in de:
                        options = api_source.get(
                            'options',
                            params={
                                "paging": "false",
                                "fields": "id,name,code",
                                "filter": "optionSet.id:eq:" + de['optionSet']['id']
                            }).json()['options']
                        dsDataElements[deUID]['options'] = list()
                        for option in options:
                            dsDataElements[deUID]['options'].append(
                                option['code'])

                    # Check if the Category Combo is specified in the dataElement definition
                    COCs = list()
                    if 'categoryCombo' in de and de['categoryCombo']['id'] != defaultCC:
                        COCs = CC[de['categoryCombo']['id']]['categoryOptionCombos']

                    # Check if Category Combo is specified for the dataElement in the dataSet
                    elif 'categoryCombo' in DSE and DSE['categoryCombo']['id'] != defaultCC:
                        COCs = CC[DSE['categoryCombo']['id']]['categoryOptionCombos']

                    # Add COCs to the dataElement dictionary
                    if len(COCs) > 0:
                        dsDataElements[deUID]['COCs'] = list()
                        for coc in COCs:
                            dsDataElements[deUID]['COCs'].append(coc['id'])

            logger.info("Found " + str(len(dsDataElements)) +
                        " dataElements in dataset")

            if args.create_flat_file is not None:
                # Build a fresh min/max sheet for this dataset; keeping this out
                # of the dataSetElements loop preserves a --use_flat_file frame
                df_min_max = pd.DataFrame({}, columns=[
                    'DE UID', 'COC UID', 'DE Name', 'COC Name', 'valueType',
                    'min', 'max'
                ])
                for de in dsDataElements:
                    if 'COCs' in dsDataElements[de]:
                        for coc in dsDataElements[de]['COCs']:
                            str_pair = de + "." + coc
                            if str_pair not in greyedFields:
                                df_min_max = df_min_max.append(
                                    {
                                        "DE UID": de,
                                        "COC UID": coc,
                                        "DE Name": DE[de]['name'],
                                        "COC Name": COC[coc]['name'],
                                        "valueType": dsDataElements[de]['valueType'],
                                        "min": "",
                                        "max": ""
                                    },
                                    ignore_index=True)
                    else:
                        df_min_max = df_min_max.append(
                            {
                                "DE UID": de,
                                "COC UID": "",
                                "DE Name": DE[de]['name'],
                                "COC Name": "",
                                "valueType": dsDataElements[de]['valueType'],
                                "min": "",
                                "max": ""
                            },
                            ignore_index=True)

                # Save csv file
                # export_csv = df_min_max.to_csv(r'./ds_' + ds['name'].replace(' ', '_') + '_min_max.csv', index=None,
                #                               header=True)
                df_min_max.to_excel(writer, ds['id'], index=False)

            else:
                dataValueSets = list()
                ouCount = 1
                for ouUID in ouUIDs:
                    logger.info("Processing org unit " + ouUID + " - " +
                                str(ouCount) + "/" + str(len(ouUIDs)))
                    for period in periods:
                        #logger.info("Processing period " + period)
                        for de in dsDataElements:
                            value_type = dsDataElements[de]['valueType']
                            min_value = max_value = None
                            options = None
                            if 'options' in dsDataElements[de]:
                                options = dsDataElements[de]['options']
                            if 'COCs' in dsDataElements[de]:
                                for coc in dsDataElements[de]['COCs']:
                                    str_pair = de + "." + coc
                                    if str_pair not in greyedFields:
                                        if df_min_max is not None:
                                            min_value, max_value = get_min_max_from_df(
                                                df_min_max, value_type, de,
                                                coc)
                                        # logger.info(
                                        #     "Generating value for DE (" + value_type + "): " + DE[de]['name'] + " with COC")
                                        value = generate_dummy_value({
                                            'value_type': value_type,
                                            'min_value': min_value,
                                            'max_value': max_value,
                                            'options': options
                                        })
                                        if value is not None:  # Skip if it is None
                                            dataValueSets.append({
                                                "dataElement": de,
                                                "categoryOptionCombo": coc,
                                                "value": value,
                                                "orgUnit": ouUID,
                                                "period": period
                                            })
                                    # else:
                                    #     logger.warning('Skipping ' + str_pair + ' because is greyed in section')
                            else:
                                if df_min_max is not None:
                                    min_value, max_value = get_min_max_from_df(
                                        df_min_max, value_type, de)
                                # logger.info("Generating value for DE (" + value_type + "): " + DE[de]['name'])
                                value = generate_dummy_value({
                                    'value_type': value_type,
                                    'min_value': min_value,
                                    'max_value': max_value,
                                    'options': options
                                })
                                if value is not None:  # Skip if it is None
                                    dataValueSets.append({
                                        "dataElement": de,
                                        "value": value,
                                        "orgUnit": ouUID,
                                        "period": period
                                    })

                    post_to_server({'dataValues': dataValueSets},
                                   'dataValueSets')
                    dataValueSets = list()
                    ouCount += 1

        if args.create_flat_file is not None:
            writer.save()
Example #7
def main():
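    # Rebuilds the numerator of each health area's 'HNQIS - <HA> count'
    # indicator from the matching programIndicators, backing up the previous
    # indicators before posting the update.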
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)

    if '.psi-mis.org' not in args.server and '.hnqis.org' not in args.server:
        logger.warn("This script is intended only for *.psi-mis.org or *.hnqis.org")
        sys.exit(0)

    indicators = {}
    backup_indicators = []
    container = []

    for ha in HEALTH_AREAS:

        # VMMC splits do not have their own HA
        if ha == 'VMMC':
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {}'.format(ha),
                    'name:like$:count',
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        else:
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {} count'.format(ha),
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        data1 = api.get('programIndicators', params=p1).json()
        pi_uids = [p['id'] for p in data1['programIndicators']]

        p2 = {
            'paging': False,
            'filter': ['name:eq:HNQIS - {} count'.format(ha)],
            'fields': ':owner'
        }
        data2 = api.get('indicators', params=p2).json()
        backup_indicators.append(data2['indicators'])

        p3 = {
            'paging': False,
            'filter': [
                'shortName:like: HNQIS {}'.format(ha),  # 2.30 would need to change filters
                'name:!like:v1'
            ],
            'fields': 'id,name'
        }
        data3 = api.get('programs', params=p3).json()
        no_of_programs = len(data3['programs'])

        if no_of_programs != len(pi_uids):
            print(u"\033[1mWarning\033[1m\033[0m - number of {} programs ({}) "
                  u"does not match number of 'count' programIndicators ({})!".format(ha, no_of_programs, len(pi_uids)))
            print("\n".join([x['name'] for x in data3['programs']]))

        if len(data2['indicators']) == 1:
            i = data2['indicators'][0]
            i['numerator'] = create_numerator(pi_uids)
            container.append(i)
            print(u'  \033[1m{}\033[0m - Added {} programIndicators to numerator of indicator "{}"'.format(ha, len(pi_uids), i['name']))

        elif len(data2['indicators']) > 1:
            print(u"\033[1mMore than one indicator found for health area {}\033[0m".format(ha))
        elif len(pi_uids) != 0:
            print(u"\033[1mNo indicator found for health area {}\033[0m".format(ha))

    dump_to_file(backup_indicators)
    indicators['indicators'] = container

    print(u"Posting updated programindicators to \033[1m{}\033[0m...".format(args.server))
    time.sleep(3)
    
    api.post('metadata', params={'importMode': 'COMMIT', 'preheatCache': False}, data=indicators)
Example #8
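# Fragment: the opening of the api.post(...) call that these keyword arguments
# belong to is truncated in the source.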
                            'mergeMode': 'REPLACE',
                            'importStrategy': 'CREATE_AND_UPDATE'
                        },
                        json={'dataElements': [dummy_data_de]})
except RequestException as e:
    # Print errors returned from DHIS2
    logger.error("POST failed with error " + str(e))
    exit()
else:
    print('Data element ' + de_uid + ' created')

# Get OU level 1
try:
    ou = api.get('organisationUnits',
                 params={
                     'fields': 'id,name',
                     'filter': 'level:eq:1'
                 }).json()['organisationUnits']
except RequestException as e:
    # Print errors returned from DHIS2
    logger.error("GET ou failed with error " + str(e))
    exit()
else:
    print('Using root OU ' + ou[0]['name'])
    ou_uid = ou[0]['id']

# Get periods
dataValueSets = list()
for period in get_periods(frequency, start_date, end_date):
    print("Creating dummy data for period " + period)
    dataValueSets.append({
Example #9
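# Iterates over the items of the selected dashboards, requests each item and
# its analytics data, and records broken items or items without data in a CSV.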
def main():

    my_parser = argparse.ArgumentParser(description='dashboard_checker')
    my_parser.add_argument('-i',
                           '--instance',
                           action="store",
                           dest="instance",
                           type=str,
                           help='URL of the instance to process')
    my_parser.add_argument(
        '-df',
        '--dashboard_filter',
        action="store",
        dest="dashboard_filter",
        type=str,
        help='Either a prefix or a list of comma separated UIDs')
    my_parser.add_argument('--no_data_warning',
                           dest='no_data_warning',
                           action='store_true')
    my_parser.add_argument('--omit-no_data_warning',
                           dest='no_data_warning',
                           action='store_false')
    my_parser.add_argument('-v',
                           '--verbose',
                           dest='verbose',
                           action='store_true')
    my_parser.set_defaults(no_data_warning=True)
    my_parser.set_defaults(verbose=False)
    args = my_parser.parse_args()

    if args.instance is not None:
        instances = [{
            'name': args.instance.split('/')[-1].replace(':', '_'),
            'url': args.instance
        }]
    else:
        instances = [
            #{'name':'newdemos', 'url':'https://who-demos.dhis2.org/newdemos', 'SQL_view_TRK':'xfemQFHUTUV', 'SQL_view_AGG':'lg8lFbDMw2Z'}
            #{'name':'tracker_dev', 'url': 'https://who-dev.dhis2.org/tracker_dev', 'SQL_view_TRK': 'xfemQFHUTUV', 'SQL_view_AGG': 'lg8lFbDMw2Z'}
            {
                'name': 'covid-19',
                'url': 'https://demos.dhis2.org/covid-19',
                'SQL_view_TRK': 'xfemQFHUTUV',
                'SQL_view_AGG': 'lg8lFbDMw2Z'
            }
        ]

    log_file = "./dashboard_checker.log"
    setup_logger(log_file)

    credentials_file = './auth.json'

    df = pd.DataFrame({},
                      columns=[
                          'dashboard_name', 'type', 'uid', 'name', 'issue',
                          'api_link', 'app_link'
                      ])

    errors_found = 0

    for instance in instances:
        try:
            with open(credentials_file) as json_file:
                credentials = json.load(json_file)
        except IOError:
            print("Please provide file auth.json with credentials for DHIS2 server")
            exit(1)
        api_source = Api(instance['url'], credentials['dhis']['username'],
                         credentials['dhis']['password'])

        # Get dashboards
        params = {"fields": "*", "paging": "false"}
        if args.dashboard_filter is not None:
            item_list = args.dashboard_filter.split(',')
            if len(item_list) == 1 and not is_valid_uid(item_list[0]):
                params["filter"] = "name:$like:" + args.dashboard_filter
            else:
                # Otherwise treat the filter as a comma separated list of UIDs
                for item in item_list:
                    if not is_valid_uid(item):
                        logger.error("UID " + item + " is not a valid DHIS2 UID")
                        exit(1)
                params["filter"] = "id:in:[" + args.dashboard_filter + "]"

        dashboards = api_source.get('dashboards',
                                    params=params).json()['dashboards']

        dashboard_item_with_issues_row = dict()

        for dashboard in dashboards:
            logger.info('Processing dashboard ' + dashboard['name'])
            dashboard_item_with_issues_row['dashboard_name'] = dashboard['name']
            if '2.33' not in api_source.version:
                dashboard_items = [
                    'visualization', 'eventReport', 'eventChart', 'map'
                ]
            else:
                dashboard_items = [
                    'chart', 'reportTable', 'eventReport', 'eventChart', 'map'
                ]
            for dashboardItem in dashboard['dashboardItems']:
                # The dashboard item could be of type TEXT, for example;
                # in that case there is nothing to do
                dashboard_item_type_found = False
                for dashboard_item in dashboard_items:
                    if dashboard_item in dashboardItem:
                        dashboard_item_type_found = True
                        dashboard_item_with_issues_row['issue'] = ""
                        dashboard_item_with_issues_row['type'] = dashboard_item
                        dashboard_item_with_issues_row['uid'] = dashboardItem[dashboard_item]['id']
                        dashboard_item_with_issues_row['name'] = ""
                        if args.verbose:
                            logger.info('Trying ' + dashboard_item + ' ' +
                                        dashboardItem[dashboard_item]['id'])
                        try:
                            api_endpoint = dashboard_item + 's/' + dashboardItem[dashboard_item]['id']
                            dashboard_item_with_issues_row['api_link'] = \
                                instance['url'] + '/api/' + api_endpoint
                            item = api_source.get(api_endpoint,
                                                  params={"fields": "*"}).json()
                        except RequestException as e:
                            logger.error(dashboard_item + ' ' +
                                         dashboardItem[dashboard_item]['id'] +
                                         " BROKEN with error " + str(e))
                            dashboard_item_with_issues_row['issue'] = str(e)
                            errors_found += 1
                        else:
                            dashboard_item_with_issues_row['name'] = item['name']
                            if dashboard_item in ['eventReport', 'eventChart']:
                                continue
                            # Try to get the data
                            try:
                                if dashboard_item == 'map':
                                    for map_view in item['mapViews']:
                                        params = build_analytics_payload(map_view, args.verbose)
                                        if params != {}:
                                            if ('layer' in map_view
                                                    and map_view['layer'] == 'event'
                                                    and 'program' in map_view):
                                                data = api_source.get(
                                                    'analytics/events/query/' +
                                                    map_view['program']['id'],
                                                    params=params).json()
                                            else:
                                                data = api_source.get(
                                                    'analytics', params=params).json()
                                else:
                                    data = api_source.get(
                                        'analytics',
                                        params=build_analytics_payload(
                                            item, args.verbose)).json()
                            except RequestException as e:
                                logger.error(dashboard_item + ' ' +
                                             dashboardItem[dashboard_item]['id'] +
                                             " data cannot be retrieved with error " +
                                             str(e))
                                dashboard_item_with_issues_row['issue'] = str(e)
                                errors_found += 1
                            else:
                                # print(data['rows'])
                                if args.no_data_warning and (
                                        'rows' not in data or len(data['rows']) == 0):
                                    dashboard_item_with_issues_row['issue'] = 'NO DATA'
                                    logger.warning(
                                        dashboardItem[dashboard_item]['id'] +
                                        ': NO DATA!!!')

                            #exit(0)

                if dashboard_item_type_found and dashboard_item_with_issues_row['issue'] != "":
                    if dashboard_item_with_issues_row['type'] == 'visualization':
                        dashboard_item_with_issues_row['app_link'] = \
                            instance['url'] + '/dhis-web-data-visualizer/index.html#/' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'map':
                        dashboard_item_with_issues_row['app_link'] = \
                            instance['url'] + '/dhis-web-maps/index.html'
                    elif dashboard_item_with_issues_row['type'] == 'eventReport':
                        dashboard_item_with_issues_row['app_link'] = \
                            instance['url'] + '/dhis-web-event-reports/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'eventChart':
                        dashboard_item_with_issues_row['app_link'] = \
                            instance['url'] + '/dhis-web-event-visualizer/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    df = df.append(dashboard_item_with_issues_row,
                                   ignore_index=True)

    df.to_csv(instance['name'] + '.csv', index=False, header=True)

    # Release log handlers
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)

    return errors_found