def main():
    # Print DHIS2 Info
    logger.warning(
        "You are running on DHIS2 version {} revision {} - "
        "Last Analytics generation was at: {}".format(
            api.version, api.revision, api.info.get("lastAnalyticsTableSuccess")
        )
    )

    # GET dataElements that contain 'ANC' in their name
    params = {"filter": "name:like:ANC", "paging": False, "fields": ":owner"}
    data_elements = api.get("dataElements", params=params).json()

    # Loop through each dataElement
    for de in data_elements["dataElements"]:
        # Add (updated) to the name
        de["name"] = "{} (updated)".format(de["name"])

        try:
            # Replace the dataElement on the server
            api.put(
                "dataElements/{}".format(de["id"]),
                params={"mergeMode": "REPLACE"},
                json=de,
            )
        except RequestException as e:
            # Print errors returned from DHIS2
            logger.error(
                "Updating DE '{}' ({}) failed: {}".format(de["name"], de["id"], e)
            )
        else:
            # Print success message
            logger.info("Updated DE '{}' ({}) successful".format(de["name"], de["id"]))
Example #3
def test_setup_logger_default():
    from dhis2 import logger, setup_logger
    setup_logger()
    logger.info("info")
    logger.warning("warn")
    logger.debug("debug")
    logger.error("error")
Example #4
def main():
    setup_logger()
    args = parse_args()

    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password)

    p = {
        'fields':
        'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()

    uid_cache = set()
    for i, rule in enumerate(data['validationRules'], 1):
        info_msg = "{}/{} Analyzing Validation Rule '{}' ({})"
        logger.info(
            info_msg.format(i, len(data['validationRules']), rule['name'],
                            rule['id']))

        uids_in_expressions = extract_uids(rule)
        for uid in uids_in_expressions:
            if uid not in uid_cache:
                try:
                    api.get('identifiableObjects/{}'.format(uid)).json()
                except APIException as exc:
                    if exc.code == 404:
                        logger.warning(
                            "UID in expression not identified: {}".format(uid))
                    else:
                        logger.error(exc)
                else:
                    uid_cache.add(uid)
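extract_uids() is defined elsewhere in the script; a minimal sketch that matches how this example and the next one use it, assuming DHIS2 UIDs (a letter followed by 10 alphanumeric characters) are scraped from both rule expressions:

import re

UID_PATTERN = re.compile(r'\b[A-Za-z][A-Za-z0-9]{10}\b')

def extract_uids(rule):
    # Scan the left- and right-side expressions for anything shaped like a UID.
    expressions = '{} {}'.format(
        rule.get('leftSide', {}).get('expression', ''),
        rule.get('rightSide', {}).get('expression', ''))
    return set(UID_PATTERN.findall(expressions))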
Example #5
def check_validation_rules(api):
    p = {
        'fields':
        'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()

    logger.info("*** CHECKING {} VALIDATION RULES... ***".format(
        len(data['validationRules'])))

    for rule in data['validationRules']:
        uid_cache = set()

        uids_in_expressions = extract_uids(rule)
        for uid in uids_in_expressions:
            if uid not in uid_cache:
                try:
                    api.get('identifiableObjects/{}'.format(uid)).json()
                except RequestException as exc:
                    if exc.code == 404:
                        logger.warning(
                            "Validation Rule '{}' ({}) - "
                            "UID in expression not identified: {}".format(
                                rule['name'], rule['id'], uid))
                        uid_cache.add(uid)
                    else:
                        logger.error(exc)
                else:
                    uid_cache.add(uid)
Example #6
def test_setup_logger_to_file():
    from dhis2 import logger, setup_logger

    filename = os.path.join(tempfile.gettempdir(), 'logfile.log')

    setup_logger(logfile=filename)
    logger.info("info")
    logger.warning("warn")
    logger.debug("debug")
    logger.error("error")

    assert os.path.isfile(filename)
Example #7
def post_chunked_data(api_endpoint, data_list, json_key, chunk_max_size):
    number_elems = len(data_list)
    if number_elems <= chunk_max_size:
        # Small payload: post everything in one request and stop,
        # otherwise the loop below would post the same data a second time.
        post_to_server(api_endpoint, {json_key: data_list}, json_key)
        return
    chunk = dict()
    count = 0
    for x in range(0, number_elems, chunk_max_size):
        # Python slices clamp at the end of the list, so no bounds check is needed
        chunk[json_key] = data_list[x:x + chunk_max_size]
        count += 1

        retries = 0
        while retries <= 5:
            try:
                response = api_endpoint.post(json_key,
                                             params={
                                                 'mergeMode': 'REPLACE',
                                                 'strategy':
                                                 'CREATE_AND_UPDATE'
                                             },
                                             json=chunk)

            except RequestException as e:
                logger.error(str(e))
                time.sleep(3)
                retries += 1
            else:
                # Print success message
                text = json.loads(response.text)
                if 'status' in text and text['status'] == 'ERROR':
                    error_report = text['typeReports'][0]['objectReports'][0][
                        'errorReports'][0]
                    logger.error(error_report)
                else:
                    if 'response' in text:
                        for key in [
                                'importSummaries', 'importOptions',
                                'responseType'
                        ]:
                            if key in text:
                                text.pop(key, None)
                        logger.info(
                            json.dumps(text['response'],
                                       indent=4,
                                       sort_keys=True))
                    logger.info("Operation successful: chunk " + str(count) +
                                " of " + str(json_key) + " created/updated")
                break
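A hypothetical call, reusing the api object from the setup sketch at the top of the page; all_data_elements stands in for a large metadata list:

# Push the list in chunks of 50 objects per request.
# post_chunked_data(api, all_data_elements, json_key='dataElements', chunk_max_size=50)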
Example #8
def find_possible_translations(df, en_string, lang):
    match = ""
    if 'en' in df.columns:
        index = 0
        for english_text in df['en'].tolist():
            if lev.ratio(en_string.lower(), english_text.lower()) > 0.95:
                break
            index += 1
        if index != len(df['en'].tolist()) and lang in df.columns:
            match = df.iloc[index][lang]
    else:
        logger.error("DataFrame does not have an 'en' column")

    return match
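A small illustration, assuming the lev name above is bound to the python-Levenshtein package (the sample rows are made up):

import pandas as pd
import Levenshtein as lev  # assumed binding for `lev`

df = pd.DataFrame({'en': ['ANC 1st visit', 'ANC 2nd visit'],
                   'fr': ['CPN 1ère visite', 'CPN 2ème visite']})
print(find_possible_translations(df, 'anc 1st visit', 'fr'))  # -> 'CPN 1ère visite'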
Example #9
def main():
    # load the JSON file that sits next to the script
    data = load_json('2_import_metadata.json')

    try:
        # import metadata
        api.post('metadata.json',
                 params={
                     'preheatCache': False,
                     'strategy': 'CREATE'
                 },
                 json=data)
    except RequestException as e:
        logger.error("Import failed: {}".format(e))
    else:
        logger.info("Import successful!")
Example #10
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server,
              username=args.username,
              password=args.password)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [
        p['id']
        for p in api.get('programs', params=params_get).json()['programs']
    ]
    for p in programs_csv:
        if p not in programs_server:
            logger.error(
                u"Program {0} is not a valid program: {1}/programs/{0}.json".
                format(p, api.api_url))

    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid),
                          params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list,
                                       args.append_orgunits)
        metadata_payload.append(updated)

        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)

        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list),
                  len(program['organisationUnits']), program['name']))

        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, json=final)
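get_program_orgunit_map() and set_program_orgunits() are defined elsewhere in the script. A sketch of the former under one plausible reading of the CSV (rows are dicts from load_csv, with an 'orgunit' UID column and one column per program UID, marked truthy where the orgunit should be assigned):

from collections import defaultdict

def get_program_orgunit_map(data):
    # Hypothetical layout: {'orgunit': '<orgunitUID>', '<programUID>': 'x', ...}
    mapping = defaultdict(list)
    for row in data:
        for key, value in row.items():
            if key != 'orgunit' and value and value.strip():
                mapping[key.strip()].append(row['orgunit'])
    return dict(mapping)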
Example #12
def post_to_server(api, jsonObject, apiObject='metadata', strategy='CREATE_AND_UPDATE'):
    try:
        response = api.post(apiObject,
                            params={'mergeMode': 'REPLACE', 'importStrategy': strategy},
                            json=jsonObject)
    except RequestException as e:
        # Print errors returned from DHIS2
        logger.error("metadata update failed with error " + str(e))
        return False
    if response is None:
        logger.error("Error in response from server")
        return False
    text = json.loads(response.text)
    if text['status'] == 'ERROR':
        logger.error("Import failed!\n" +
                     json.dumps(text['typeReports'], indent=4, sort_keys=True))
        return False
    if apiObject == 'metadata':
        logger.info("metadata imported " + text['status'] + " " +
                    json.dumps(text['stats']))
    else:
        logger.info("Data imported\n" + json.dumps(text, indent=4, sort_keys=True))
        if text['status'] == 'WARNING':
            logger.warning(text)
    return True
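A hypothetical call, assuming the api object from the setup sketch at the top and a new_de dict holding a data element payload:

# if post_to_server(api, {'dataElements': [new_de]}, apiObject='metadata'):
#     logger.info('data element imported')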
Example #13
def main():
    args = parse_args()
    setup_logger()

    api = Api(server=args.server, username=args.username, password=args.password)

    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))
        exit(1)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(args.attribute_uid, args.object_type[:-1]))
        exit(1)

    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    try:
        # Grace period so the run can still be cancelled
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warning("\033[1m{}\033[0m".format("Aborted!"))
        exit(1)

    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid), params=params_get).json()
        obj_updated = create_or_update_attributevalues(obj=obj_old, attribute_uid=args.attribute_uid,
                                                       attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid), json=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, args.object_type[:-1], obj_uid))
Example #14
def validate_file(filename):
    if not os.path.exists(filename):
        raise PKClientException("File does not exist: {}".format(filename))
    if not os.path.getsize(filename) > 0:
        raise PKClientException("File is empty: {}".format(filename))


def main():
    args, password = parse_args()
    setup_logger(include_caller=False)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)
    validate_file(args.css)
    post_file(api, filename=args.css)
    logger.info(
        "{} CSS posted to {}. Clear your Browser cache / use Incognito.".
        format(args.css, api.api_url))


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        logger.warn("Aborted.")
    except PKClientException as e:
        logger.error(e)
    except Exception as e:
        logger.exception(e)
Example #15
File: dd.py Project: mulaah/dhis2-utils
def choices_with_ratio(values, ratios, k):
    # values -> list of values to use. Each entry is either a plain value or an interval delimited with :
    # ratios -> list of ratios to use. Must have the same length as values
    # k -> number of values to generate
    # Make sure each ratio is not NaN or an empty string
    ratios = [x if not isnull(x) and x != "" else float(0) for x in ratios]
    rationed_number = [int(round(x * k)) for x in ratios]
    # The total number of values which will be generated applying the ratios and rounding the result
    total_generated = sum(rationed_number)
    if len(ratios) > k or total_generated > (k + k / 2):
        logger.warning(
            'The number of values to generate is too small for the number of ratios provided'
        )
    if total_generated != k:
        # Find the ratios to correct
        # The idea is that if we have generated less than the total we will randomly increase the elements
        # having the highest ratio (to get more of what we should have more). Otherwise we will decrease
        # the elements with lowest ratio (to get less of what we should have less)
        if total_generated < k:
            ratios_to_correct = max(ratios)
        else:
            ratios_to_correct = min(ratios)
            if ratios_to_correct == float(0):
                # Remove them from ratios
                tmp_ratios = [
                    ratios[i] for i in range(len(ratios))
                    if ratios[i] != ratios_to_correct
                ]
                ratios_to_correct = min(tmp_ratios)
        # Index returns the first occurrence
        # highest_ratio_index = ratios.index(highest_ratio)
        # Find all occurrences
        indices = [
            i for i in range(len(ratios)) if ratios[i] == ratios_to_correct
        ]
        number_of_iterations = 0
        while total_generated != k:
            if total_generated < k:
                # Add 1 to element with highest ratio
                rationed_number[choice(indices)] += 1
            elif total_generated > k:
                # Subtract 1 from an element with the lowest ratio
                chosen_random_index = choice(indices)
                if rationed_number[chosen_random_index] > 0:
                    rationed_number[chosen_random_index] -= 1
                else:
                    # Take it out from ratios and recalculate
                    ratios[chosen_random_index] = 0.0
                    minimum_ratio_not_zero = 1.0
                    indices = list()
                    for r in ratios:
                        if r < minimum_ratio_not_zero and r != 0.0:
                            minimum_ratio_not_zero = r
                    indices = [
                        i for i in range(len(ratios))
                        if ratios[i] == minimum_ratio_not_zero
                    ]

            total_generated = sum(rationed_number)
            number_of_iterations += 1

            # We should not spend here too much time, otherwise it is worth resetting the indexes

            # if number_of_iterations == 25:
            #     indices = [i for i in range(len(ratios)) if ratios[i] <= (ratios_to_correct+0.1)]
            #     number_of_iterations = 0
    # Create list of values to return
    choices = list()
    if ':' not in values[0]:
        for i in range(0, len(values)):
            choices.extend([values[i]] * rationed_number[i])
    else:
        for i in range(0, len(values)):
            min_max_values = str(values[i]).split(":")
            if len(min_max_values) == 2:
                if isInt(min_max_values[0]) and isInt(min_max_values[1]):
                    min_value = int(min_max_values[0])
                    max_value = int(min_max_values[1])
                    if min_value < max_value:
                        choices.extend(
                            numpy.random.randint(min_value, max_value,
                                                 rationed_number[i]))
                    else:
                        logger.error('min value ' + str(min_value) +
                                     ' is greater than max value ' +
                                     str(max_value))
                elif isFloat(min_max_values[0]) and isFloat(min_max_values[1]):
                    min_value = float(min_max_values[0])
                    max_value = float(min_max_values[1])
                    if min_value < max_value:
                        choices.extend(
                            numpy.random.uniform(min_value, max_value,
                                                 rationed_number[i]))
                    else:
                        logger.error('min value ' + str(min_value) +
                                     ' is greater than max value ' +
                                     str(max_value))
                elif isDateFormat(min_max_values[0]) and (isDateFormat(
                        min_max_values[1]) or min_max_values[1] == 'today'):
                    min_date = datetime.strptime(min_max_values[0],
                                                 '%Y-%m-%d').date()
                    if min_max_values[1] == 'today':
                        max_date = date.today()
                    else:
                        max_date = datetime.strptime(min_max_values[1],
                                                     '%Y-%m-%d').date()
                    if min_date < max_date:
                        days_between_dates = (max_date - min_date).days
                        random_days = numpy.random.randint(
                            0, days_between_dates, rationed_number[i])
                        # For the moment, return date type
                        choices.extend(
                            list(
                                map(
                                    lambda x:
                                    (min_date + timedelta(days=int(x))),
                                    random_days)))
                    else:
                        logger.error('min date ' + min_max_values[0] +
                                     ' is greater than max date ' +
                                     min_max_values[1])
                else:
                    logger.error('Could not recognize value type for ' +
                                 str(values[i]))

    shuffle(choices)

    return choices
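A few illustrative calls (the returned list is shuffled, and interval values are drawn randomly):

# 60/40 split over ten plain values:
#   choices_with_ratio(['Male', 'Female'], [0.6, 0.4], 10)
# integer and date intervals use the min:max notation:
#   choices_with_ratio(['0:99'], [1.0], 5)
#   choices_with_ratio(['2021-01-01:today'], [1.0], 5)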
Example #16
def main():

    my_parser = argparse.ArgumentParser(description='dashboard_checker')
    my_parser.add_argument('-i',
                           '--instance',
                           action="store",
                           dest="instance",
                           type=str,
                           help='URL of the instance to process')
    my_parser.add_argument(
        '-df',
        '--dashboard_filter',
        action="store",
        dest="dashboard_filter",
        type=str,
        help='Either a prefix or a list of comma separated UIDs')
    my_parser.add_argument('--no_data_warning',
                           dest='no_data_warning',
                           action='store_true')
    my_parser.add_argument('--omit-no_data_warning',
                           dest='no_data_warning',
                           action='store_false')
    my_parser.add_argument('-v',
                           '--verbose',
                           dest='verbose',
                           action='store_true')
    my_parser.set_defaults(no_data_warning=True)
    my_parser.set_defaults(verbose=False)
    args = my_parser.parse_args()

    if args.instance is not None:
        instances = [{
            'name': args.instance.split('/')[-1].replace(':', '_'),
            'url': args.instance
        }]
    else:
        instances = [
            #{'name':'newdemos', 'url':'https://who-demos.dhis2.org/newdemos', 'SQL_view_TRK':'xfemQFHUTUV', 'SQL_view_AGG':'lg8lFbDMw2Z'}
            #{'name':'tracker_dev', 'url': 'https://who-dev.dhis2.org/tracker_dev', 'SQL_view_TRK': 'xfemQFHUTUV', 'SQL_view_AGG': 'lg8lFbDMw2Z'}
            {
                'name': 'covid-19',
                'url': 'https://demos.dhis2.org/covid-19',
                'SQL_view_TRK': 'xfemQFHUTUV',
                'SQL_view_AGG': 'lg8lFbDMw2Z'
            }
        ]

    log_file = "./dashboard_checker.log"
    setup_logger(log_file)

    credentials_file = './auth.json'

    df = pd.DataFrame({},
                      columns=[
                          'dashboard_name', 'type', 'uid', 'name', 'issue',
                          'api_link', 'app_link'
                      ])

    errors_found = 0

    for instance in instances:
        try:
            with open(credentials_file) as json_file:
                credentials = json.load(json_file)
        except IOError:
            print("Please provide file auth.json with credentials for DHIS2 server")
            exit(1)
        api_source = Api(instance['url'], credentials['dhis']['username'],
                         credentials['dhis']['password'])

        # Get dashboards
        params = {"fields": "*", "paging": "false"}
        if args.dashboard_filter is not None:
            item_list = args.dashboard_filter.split(',')
            if len(item_list) == 1 and not is_valid_uid(item_list[0]):
                params["filter"] = "name:$like:" + args.dashboard_filter
            else:
                # Otherwise treat it as a list of UIDs and validate each one
                for item in item_list:
                    if not is_valid_uid(item):
                        logger.error("UID " + item +
                                     " is not a valid DHIS2 UID")
                        exit(1)
                params["filter"] = "id:in:[" + args.dashboard_filter + "]"

        dashboards = api_source.get('dashboards',
                                    params=params).json()['dashboards']

        dashboard_item_with_issues_row = dict()

        for dashboard in dashboards:
            logger.info('Processing dashboard ' + dashboard['name'])
            dashboard_item_with_issues_row['dashboard_name'] = dashboard[
                'name']
            if '2.33' not in api_source.version:
                dashboard_items = [
                    'visualization', 'eventReport', 'eventChart', 'map'
                ]
            else:
                dashboard_items = [
                    'chart', 'reportTable', 'eventReport', 'eventChart', 'map'
                ]
            for dashboardItem in dashboard['dashboardItems']:
                # The dashboard item could be of type TEXT, for example
                # in this case there is nothing to do
                dashboard_item_type_found = False
                for dashboard_item in dashboard_items:
                    if dashboard_item in dashboardItem:
                        dashboard_item_type_found = True
                        dashboard_item_with_issues_row['issue'] = ""
                        dashboard_item_with_issues_row['type'] = dashboard_item
                        dashboard_item_with_issues_row['uid'] = dashboardItem[
                            dashboard_item]['id']
                        dashboard_item_with_issues_row['name'] = ""
                        if args.verbose:
                            logger.info('Trying ' + dashboard_item + ' ' +
                                        dashboardItem[dashboard_item]['id'])
                        try:
                            api_endpoint = dashboard_item + 's/' + dashboardItem[
                                dashboard_item]['id']
                            dashboard_item_with_issues_row[
                                'api_link'] = instance[
                                    'url'] + '/api/' + api_endpoint
                            item = api_source.get(api_endpoint,
                                                  params={
                                                      "fields": "*"
                                                  }).json()
                        except RequestException as e:
                            logger.error(dashboard_item + ' ' +
                                         dashboardItem[dashboard_item]['id'] +
                                         " BROKEN with error " + str(e))
                            dashboard_item_with_issues_row['issue'] = str(e)
                            errors_found += 1
                        else:
                            dashboard_item_with_issues_row['name'] = item[
                                'name']
                            if dashboard_item in ['eventReport', 'eventChart']:
                                continue
                            # Try to get the data
                            try:
                                if dashboard_item == 'map':
                                    for map_view in item['mapViews']:
                                        params = build_analytics_payload(
                                            map_view, args.verbose)
                                        if params != {}:
                                            if 'layer' in map_view and map_view[
                                                    'layer'] == 'event' and 'program' in map_view:
                                                data = api_source.get(
                                                    'analytics/events/query/' +
                                                    map_view['program']['id'],
                                                    params=params).json()
                                            else:
                                                data = api_source.get(
                                                    'analytics',
                                                    params=params).json()
                                else:
                                    data = api_source.get(
                                        'analytics',
                                        params=build_analytics_payload(
                                            item, args.verbose)).json()
                            except RequestException as e:
                                logger.error(
                                    dashboard_item + ' ' +
                                    dashboardItem[dashboard_item]['id'] +
                                    " data cannot be retrieved with error " +
                                    str(e))
                                dashboard_item_with_issues_row['issue'] = str(
                                    e)
                                errors_found += 1
                            else:
                                # print(data['rows'])
                                if args.no_data_warning and (
                                        'rows' not in data
                                        or len(data['rows']) == 0):
                                    dashboard_item_with_issues_row[
                                        'issue'] = 'NO DATA'
                                    logger.warning(
                                        dashboardItem[dashboard_item]['id'] +
                                        ': NO DATA!!!')

                            #exit(0)

                if dashboard_item_type_found and dashboard_item_with_issues_row[
                        'issue'] != "":
                    if dashboard_item_with_issues_row[
                            'type'] == 'visualization':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-data-visualizer/index.html#/' + \
                                                                     dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'map':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-maps/index.html'
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventReport':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-event-reports/index.html?id=' + \
                                                                     dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventChart':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                                                                     '/dhis-web-event-visualizer/index.html?id=' + \
                                                                     dashboard_item_with_issues_row['uid']
                    df = pd.concat(
                        [df, pd.DataFrame([dashboard_item_with_issues_row])],
                        ignore_index=True)

    df.to_csv(instance['name'] + '.csv', index=None, header=True)

    # Release log handlers
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)

    return errors_found
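Typical invocations, derived from the argparse setup above (the URL and UIDs are placeholders):

# python dashboard_checker.py -i https://play.dhis2.org/demo -df Immunization
# python dashboard_checker.py -i https://play.dhis2.org/demo -df vqh4MBWOTi4,pGhFsDiNKv2 -v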
Example #17
def build_analytics_payload(json_object, verbose=False):
    def get_group_set_dimensions(json_object, key):
        parent_key = key + 'Set'
        child_key = key + 's'
        for grp_set_dimension in json_object:
            grp_set_dimension_uid = grp_set_dimension[parent_key]['id']
            if child_key in grp_set_dimension:
                group_set_list = list()
                for group in grp_set_dimension[child_key]:
                    group_set_list.append(group['id'])
                dimensions[grp_set_dimension_uid] = group_set_list
            else:
                dimensions[grp_set_dimension_uid] = ""

    dimensions = dict()
    dimensions['ou'] = list()
    dimensions['pe'] = list()
    dimensions['dx'] = list()
    if 'organisationUnits' not in json_object or len(
            json_object['organisationUnits']) == 0:
        ou_global_selections = {
            'userOrganisationUnit': 'USER_ORGUNIT',
            'userOrganisationUnitChildren': 'USER_ORGUNIT_CHILDREN',
            'userOrganisationUnitGrandChildren': 'USER_ORGUNIT_GRANDCHILDREN'
        }
        for ou_selection in ou_global_selections:
            if ou_selection in json_object and json_object[
                    ou_selection] == True:
                dimensions['ou'].append(ou_global_selections[ou_selection])

        if 'organisationUnitLevels' in json_object and len(
                json_object['organisationUnitLevels']) > 0:
            ou_level_list = list()
            for org_unit_level in json_object['organisationUnitLevels']:
                ou_level_list.append('LEVEL-' + str(org_unit_level))
            if len(ou_level_list) > 0:
                dimensions['ou'] += ou_level_list

        if 'itemOrganisationUnitGroups' in json_object and len(
                json_object['itemOrganisationUnitGroups']) > 0:
            ou_group_list = list()
            for org_unit_group in json_object['itemOrganisationUnitGroups']:
                ou_group_list.append('OU_GROUP-' + str(org_unit_group['id']))
            if len(ou_group_list) > 0:
                dimensions['ou'] += ou_group_list
    else:
        dimensions['ou'] = json_extract(json_object['organisationUnits'], 'id')

    if 'periods' not in json_object or len(json_object['periods']) == 0:
        if 'relativePeriods' in json_object:
            pe_global_selections = {
                'thisDay': 'TODAY',
                'thisWeek': 'THIS_WEEK',
                'thisMonth': 'THIS_MONTH',
                'thisQuarter': 'THIS_QUARTER',
                'thisYear': 'THIS_YEAR',
                'lastDay': 'LAST_DAY',
                'lastWeek': 'LAST_WEEK',
                'lastMonth': 'LAST_MONTH',
                'lastQuarter': 'LAST_QUARTER',
                'lastYear': 'LAST_YEAR',
                'last30Days': 'LAST_30_DAYS',
                'last52Weeks': 'LAST_52_WEEKS',
                'last90Days': 'LAST_90_DAYS',
                'last60Days': 'LAST_60_DAYS',
                'last14Days': 'LAST_14_DAYS',
                'last2SixMonths': 'LAST_2_SIXMONTHS',
                'last12Months': 'LAST_12_MONTHS',
                'last4Weeks': 'LAST_4_WEEKS',
                'last3Months': 'LAST_3_MONTHS',
                'last5Years': 'LAST_5_YEARS',
                'last6Months': 'LAST_6_MONTHS',
                'last3Days': 'LAST_3_DAYS',
                'last7Days': 'LAST_7_DAYS',
                'last180Days': 'LAST_180_DAYS',
                'last12Weeks': 'LAST_12_WEEKS',
                'last4Quarters': 'LAST_4_QUARTERS',
                'weeksThisYear': 'WEEKS_THIS_YEAR',
                'yesterday': 'YESTERDAY',
                'quartersLastYear': 'QUARTERS_LAST_YEAR',
                'monthsThisYear': 'MONTHS_THIS_YEAR',
                'biMonthsThisYear': 'BI_MONTHS_THIS_YEAR',
                'last5FinancialYears': 'LAST_5_FINANCIAL_YEARS',
                'thisSixMonth': 'THIS_SIX_MONTH',
                'thisFinancialYear': 'THIS_FINANCIAL_YEAR',
                'last6BiMonths': 'LAST_6_BI_MONTHS',
                'last4BiWeeks': 'LAST_4_BI_WEEKS',
                'lastFinancialYear': 'LAST_FINANCIAL_YEAR',
                'lastBiWeek': 'LAST_BI_WEEK',
                'quartersThisYear': 'QUARTERS_THIS_YEAR',
                'monthsLastYear': 'MONTHS_LAST_YEAR',
                'thisBimonth': 'THIS_BI_MONTH',
                'lastBimonth': 'LAST_BI_MONTH',
                'lastSixMonth': 'LAST_SIX_MONTH',
                'thisBiWeek': 'THIS_BI_WEEK',
                'last10Years': 'LAST_10_YEARS',
                'last10FinancialYears': 'LAST_10_FINANCIAL_YEARS'
            }
            for relative_period in json_object['relativePeriods']:
                if relative_period in pe_global_selections:
                    if json_object['relativePeriods'][relative_period]:
                        dimensions['pe'].append(
                            pe_global_selections[relative_period])
                else:
                    logger.error("Unknown relativePeriod " + relative_period)
                    exit(1)
            if len(dimensions['pe']) == 0 and 'periods' in json_object:
                dimensions['pe'] = json_extract(json_object['periods'], 'id')
        else:
            return {}
    else:
        dimensions['pe'] = json_extract(json_object['periods'], 'id')

    if len(dimensions['pe']
           ) == 0 and 'startDate' in json_object and 'endDate' in json_object:
        del dimensions['pe']

    if 'dataDimensionItems' in json_object and len(
            json_object['dataDimensionItems']) > 0:
        data_dimension_keys = {
            'PROGRAM_INDICATOR': 'programIndicator',
            'INDICATOR': 'indicator',
            'DATA_ELEMENT': 'dataElement',
            'REPORTING_RATE': 'reportingRate',
            'PROGRAM_DATA_ELEMENT': 'programDataElement'
        }
        for data_dimension in json_object['dataDimensionItems']:
            # Sometimes there are empty dimensions
            if data_dimension != {}:
                if data_dimension[
                        'dataDimensionItemType'] in data_dimension_keys:
                    # Special case, it joins to UIDs with a .
                    if data_dimension[
                            'dataDimensionItemType'] == 'PROGRAM_DATA_ELEMENT':
                        UID1 = data_dimension[data_dimension_keys[
                            data_dimension['dataDimensionItemType']]][
                                'program']['id']
                        UID2 = data_dimension[data_dimension_keys[
                            data_dimension['dataDimensionItemType']]][
                                'dataElement']['id']
                        dimensions['dx'].append(UID1 + '.' + UID2)
                    else:
                        data_dimension_uid = data_dimension[
                            data_dimension_keys[
                                data_dimension['dataDimensionItemType']]]['id']
                        if data_dimension[
                                'dataDimensionItemType'] == 'REPORTING_RATE':
                            # For reporting rates, we need to add the keyword to the id
                            dimensions['dx'].append(data_dimension_uid +
                                                    '.REPORTING_RATE')
                        else:
                            dimensions['dx'].append(data_dimension_uid)
                else:
                    logger.error('Unrecognized data dimension type ' +
                                 data_dimension['dataDimensionItemType'])
                    exit(1)

    if 'dataElementDimensions' in json_object and len(
            json_object['dataElementDimensions']) > 0:
        for data_element_dimension in json_object['dataElementDimensions']:
            data_element_dimension_uid = data_element_dimension['dataElement'][
                'id']
            if 'filter' in data_element_dimension:
                dimensions[
                    data_element_dimension_uid] = data_element_dimension[
                        'filter']
            else:
                dimensions[data_element_dimension_uid] = ""

    if 'categoryOptionGroupSetDimensions' in json_object and len(
            json_object['categoryOptionGroupSetDimensions']) > 0:
        get_group_set_dimensions(
            json_object['categoryOptionGroupSetDimensions'],
            'categoryOptionGroup')
    if 'organisationUnitGroupSetDimensions' in json_object and len(
            json_object['organisationUnitGroupSetDimensions']) > 0:
        get_group_set_dimensions(
            json_object['organisationUnitGroupSetDimensions'],
            'organisationUnitGroup')
    if 'dataElementGroupSetDimensions' in json_object and len(
            json_object['dataElementGroupSetDimensions']) > 0:
        get_group_set_dimensions(json_object['dataElementGroupSetDimensions'],
                                 'dataElementGroup')

    if 'categoryDimensions' in json_object and len(
            json_object['categoryDimensions']) > 0:
        for category_dimension in json_object['categoryDimensions']:
            category_dimension_uid = category_dimension['category']['id']
            category_options = list()
            for cat_options in category_dimension['categoryOptions']:
                category_options.append(cat_options['id'])
            dimensions[category_dimension_uid] = category_options

    # Build the payload
    payload = ""
    params = dict()
    payload += 'dimension='
    params['dimension'] = ""
    added_column_dimension = False
    if 'columns' in json_object:
        first_element = True
        for column in json_object['columns']:
            added_column_dimension = True
            if not first_element:
                payload += ','
                params['dimension'] += ','
            else:
                first_element = False
            key = column['id']
            if key in dimensions:
                if isinstance(dimensions[key], list):
                    right_expression = ';'.join(dimensions[key])
                else:
                    right_expression = dimensions[key]
                payload += key + ':' + right_expression
                params['dimension'] += key + ':' + right_expression
            else:
                if key == 'pe' and 'startDate' in json_object and 'endDate' in json_object:
                    payload += "&startDate=" + json_object[
                        'startDate'] + "&endDate=" + json_object['endDate']
                    params['startDate'] = json_object['startDate']
                    params['endDate'] = json_object['endDate']
                else:
                    logger.error(json_object['id'] + ': Dimension ' + key +
                                 ' is missing')
                    # exit(1)
    else:
        logger.error('columns missing')
        exit(1)

    # A very specific and strange case for maps
    # empty columns but styleDataItem is present. In that case, it gets added to the dimension
    if 'columns' in json_object and len(
            json_object['columns']) == 0 and 'styleDataItem' in json_object:
        payload += json_object['styleDataItem']['id']
        params['dimension'] += json_object['styleDataItem']['id']
        if 'rows' in json_object and len(json_object['rows']) > 0:
            payload += ','
            params['dimension'] += ','

    if 'rows' in json_object:
        if len(json_object['rows']) > 0:
            # If we have already added some stuff, separate it with a comma
            if added_column_dimension:
                payload += ','
                params['dimension'] += ','
            first_element = True
            for row in json_object['rows']:
                if not first_element:
                    payload += ','
                    params['dimension'] += ','
                else:
                    first_element = False
                key = row['id']
                if key in dimensions:
                    payload += key + ':' + ';'.join(dimensions[key])
                    params['dimension'] += key + ':' + ';'.join(
                        dimensions[key])
                else:
                    if key == 'pe' and 'startDate' in json_object and 'endDate' in json_object:
                        payload += "&startDate=" + json_object[
                            'startDate'] + "&endDate=" + json_object['endDate']
                        params['startDate'] = json_object['startDate']
                        params['endDate'] = json_object['endDate']
                    else:
                        logger.error(json_object['id'] + ': Dimension ' + key +
                                     ' is missing')
                        # exit(1)
    else:
        logger.error('rows missing')
        exit(1)

    if 'filters' in json_object:
        if len(json_object['filters']) > 0:
            payload += '&filter='
            params['filter'] = ""
            first_element = True
            for filter in json_object['filters']:
                if not first_element:
                    payload += ','
                    params['filter'] += ','
                else:
                    first_element = False
                key = filter['id']
                if key in dimensions:
                    payload += key + ':' + ';'.join(dimensions[key])
                    params['filter'] += key + ':' + ';'.join(dimensions[key])
                else:
                    if key == 'pe' and 'startDate' in json_object and 'endDate' in json_object:
                        payload += "&startDate=" + json_object[
                            'startDate'] + "&endDate=" + json_object['endDate']
                        params['startDate'] = json_object['startDate']
                        params['endDate'] = json_object['endDate']
                    else:
                        logger.error(json_object['id'] + ': Dimension ' + key +
                                     ' is missing')
                        # exit(1)
    else:
        logger.error('filters missing')
        exit(1)

    if 'programStage' in json_object:
        payload += '&stage=' + json_object['programStage']['id']
        params['stage'] = json_object['programStage']['id']

    # Important, to get the data
    payload += "&skipData=false"
    params['skipData'] = 'false'

    if verbose:
        logger.info(payload)

    return params
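A hypothetical end-to-end use, mirroring how Example #16 drives this function (the visualization UID is a placeholder):

# favorite = api_source.get('visualizations/qfMh2IjOxvw', params={'fields': '*'}).json()
# params = build_analytics_payload(favorite, verbose=True)
# data = api_source.get('analytics', params=params).json()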
Example #18
    "zeroIsSignificant": False,
    "favorite": False,
    "optionSetValue": False,
}

# First, create the data element
try:
    response = api.post('metadata',
                        params={
                            'mergeMode': 'REPLACE',
                            'importStrategy': 'CREATE_AND_UPDATE'
                        },
                        json={'dataElements': [dummy_data_de]})
except RequestException as e:
    # Print errors returned from DHIS2
    logger.error("POST failed with error " + str(e))
    exit()
else:
    print('Data element ' + de_uid + ' created')

# Get OU level 1
try:
    ou = api.get('organisationUnits',
                 params={
                     'fields': 'id,name',
                     'filter': 'level:eq:1'
                 }).json()['organisationUnits']
except RequestException as e:
    # Print errors returned from DHIS2
    logger.error("GET ou failed with error " + str(e))
    exit()
Example #19
def get_periods(frequency, start_date, end_date):
    dt_start = datetime.strptime(start_date, "%Y-%m-%d")
    dt_end = datetime.strptime(end_date, "%Y-%m-%d")
    periods = list()

    if dt_start < dt_end:
        if frequency.lower() == 'daily':  # yyyyMMdd
            for single_date in (dt_start + timedelta(n)
                                for n in range((dt_end - dt_start).days + 1)):
                periods.append(single_date.strftime("%Y%m%d"))

        elif frequency.lower() == 'weekly':  # yyyyWn
            single_date = dt_start
            while single_date <= dt_end:
                # Use the ISO year so the week around New Year lands in the right year
                iso_year, iso_week = single_date.isocalendar()[:2]
                periods.append(str(iso_year) + 'W' + str(iso_week))
                single_date = single_date + timedelta(7)  # 7 days, a week

        elif frequency.lower() == 'monthly':  # yyyyMM
            start_month = dt_start.month
            end_month = dt_end.month
            start_year = dt_start.year
            end_year = dt_end.year
            for year in range(start_year, (end_year + 1)):
                last_month = 12
                first_month = 1
                if year == start_year:
                    first_month = start_month
                if year == end_year:
                    last_month = end_month
                for month in range(first_month, (last_month + 1)):
                    periods.append(str(year) + str(month).zfill(2))

        elif frequency.lower() == 'quarterly':  # yyyyQn
            start_quarter = (dt_start.month - 1) // 3 + 1
            end_quarter = (dt_end.month - 1) // 3 + 1
            start_year = dt_start.year
            end_year = dt_end.year
            for year in range(start_year, (end_year + 1)):
                last_quarter = 4
                first_quarter = 1
                if year == start_year:
                    first_quarter = start_quarter
                if year == end_year:
                    last_quarter = end_quarter
                for quarter in range(first_quarter, (last_quarter + 1)):
                    periods.append(str(year) + 'Q' + str(quarter))

        elif frequency.lower() == 'yearly':  # yyyy
            start_year = dt_start.year
            end_year = dt_end.year
            for year in range(start_year, (end_year + 1)):
                periods.append(str(year))
        else:
            logger.error("Period type = '" + frequency + "' not supported")
            exit(1)
    else:
        logger.error("Start date = '" + start_date +
                     "' is after end date = '" + end_date + "'")
        exit(1)

    return periods
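Sample outputs, traced from the branches above:

# get_periods('monthly', '2021-11-01', '2022-02-01')
#   -> ['202111', '202112', '202201', '202202']
# get_periods('quarterly', '2021-01-01', '2021-12-31')
#   -> ['2021Q1', '2021Q2', '2021Q3', '2021Q4']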
Example #20
def main():

    logger.warning("Server source running DHIS2 version {} revision {}".format(
        api.version, api.revision))

    import argparse

    my_parser = argparse.ArgumentParser(
        prog='delete_TEIs',
        description='Delete all TEIs created by robot',
        epilog="",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    my_parser.add_argument('Program_UID',
                           metavar='program_uid',
                           type=str,
                           help='the uid of the program to use')
    my_parser.add_argument(
        '-ou',
        '--org_unit',
        action="store",
        dest="OrgUnit",
        type=str,
        help='Rather than deleting from the root of the tree, deletes from a '
             'specific orgUnit including descendants. Eg: --ou=Q7RbNZcHrQ9')

    args = my_parser.parse_args()
    program_uid = args.Program_UID
    if not is_valid_uid(program_uid):
        logger.error('The program uid specified is not a valid DHIS2 uid')
        exit(1)
    else:
        try:
            program = api.get('programs/' + program_uid).json()
        except RequestException as e:
            if e.code == 404:
                logger.error('Program ' + program_uid +
                             ' specified does not exist')
            else:
                logger.error(e)
            exit(1)

    ou = 'GD7TowwI46c'  # Trainingland
    if args.OrgUnit is not None:
        if not is_valid_uid(args.OrgUnit):
            logger.error('The orgunit uid specified is not a valid DHIS2 uid')
            exit(1)
        else:
            try:
                orgunit = api.get('organisationUnits/' + args.OrgUnit).json()
            except RequestException as e:
                if e.code == 404:
                    logger.error('Org Unit ' + args.OrgUnit +
                                 ' specified does not exist')
                else:
                    logger.error(e)
                exit(1)
            else:
                ou = orgunit['id']

    params = {
        'ou': ou,
        'ouMode': 'DESCENDANTS',
        'program': program_uid,
        'skipPaging': 'true',
        #'lastUpdatedDuration': '4d',
        #'fields': '*'
        'fields': 'trackedEntityInstance,enrollments'
    }

    data = api.get('trackedEntityInstances',
                   params=params).json()['trackedEntityInstances']

    logger.info("Found " + str(len(data)) + " TEIs")

    user = '******'
    for tei in data:
        # #### Uncomment this to filter by user
        if 'enrollments' not in tei:
            import json
            logger.info(json.dumps(tei, indent=4))
            continue
        if tei["enrollments"][0]["storedBy"] != user:
            logger.warning("Skipping tei stored by " +
                           tei["enrollments"][0]["storedBy"])
            continue
        # ####
        tei_uid = tei['trackedEntityInstance']
        try:
            api.delete('trackedEntityInstances/' + tei_uid)
        except RequestException as e:
            logger.error(e)
        else:
            logger.info("TEI " + tei_uid + " removed")
Example #21
    my_parser.add_argument('-get', '--get_dict_from_form', action="store", metavar='file_name',
                           const='dictionary_' + generate_uid(), nargs='?',
                           help="Create dictionary of translatable string from custom forms in the instance."
                                "\nOptionally, you can pass the name of the output file to create."
                                "\nIf an existing xlsx file is provided, it creates a file _new.xlsx with updated keys and EN strings."
                                "\nEg: --get_dict_from_form=my_file_name")
    my_parser.add_argument('-post', '--update_form_from_dict', action="store", metavar='file_name', nargs=1,
                           help="Use dictionary in xlsx format to update translations in form"
                                "\nEg: --update_form_from_dict=my_file.xlsx")
    my_parser.add_argument('-gk', '--generate_keys', action='store_true',
                           help='This optional argument makes sure the keys are regenerated in the html form and the dict')

    args = my_parser.parse_args()

    if args.get_dict_from_form is None and args.update_form_from_dict is None:
        logger.error('Please specify at least one option. Try with -h to check for command line help')
        exit(1)
    mode = 'get'
    if args.get_dict_from_form is not None:
        logger.info("Creating dictionary")
        if '.xlsx' not in args.get_dict_from_form:
            output_file_name = args.get_dict_from_form + '.xlsx'
        else:
            output_file_name = args.get_dict_from_form
        print(output_file_name)
    elif args.update_form_from_dict is not None:
        mode = 'post'
        logger.info("Updating custom forms")
        input_file_name = args.update_form_from_dict[0]
        try:
            xls = pd.ExcelFile(input_file_name)
        except FileNotFoundError:
            logger.error('File ' + input_file_name + ' does not exist')