Example No. 1
from collections import OrderedDict

from blsqpy.descriptor import Descriptor


def to_expressions(activity, activity_code):
    """Name composite expressions for sources that span several uids."""
    expressions = OrderedDict()
    prefix = activity_code + "_" if activity_code else ""
    for state_code, state in Descriptor.as_items(activity.states):
        for source_code, source in Descriptor.as_items(state.sources):
            suffix = "_" + source_code
            if len(source.uids) > 1:
                # One numbered expression per uid, e.g. pills_visits_1_dhis2
                state_expressions = [
                    prefix + state_code + "_" + str(idx + 1) + suffix
                    for idx in range(len(source.uids))
                ]
                expressions[prefix + state_code + suffix] = state_expressions
    return expressions
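# --- Illustration (not from the original source) ---
# A minimal, hypothetical fixture showing what to_expressions produces. It
# assumes Descriptor.as_items yields (field_name, value) pairs from the
# namedtuple-style records that Descriptor.load builds; the state and source
# codes below are made up, the uids are borrowed from Example No. 5.
from collections import namedtuple

Source = namedtuple("Source", ["uids"])
State = namedtuple("State", ["sources"])
Sources = namedtuple("Sources", ["dhis2"])   # "dhis2": made-up source code
States = namedtuple("States", ["visits"])    # "visits": made-up state code
Activity = namedtuple("Activity", ["states"])

activity = Activity(states=States(visits=State(
    sources=Sources(dhis2=Source(uids=["s4CxsmoqdRj", "fSD1ZZo4hTs"])))))

print(to_expressions(activity, "pills"))
# Expected, under the assumptions above:
# OrderedDict([('pills_visits_dhis2',
#               ['pills_visits_1_dhis2', 'pills_visits_2_dhis2'])])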
Example No. 2
from collections import OrderedDict

from blsqpy.descriptor import Descriptor


def to_mappings(activity, activity_code):
    """Map each data element uid to the expression name it feeds."""
    mappings = OrderedDict()
    prefix = activity_code + "_" if activity_code else ""
    for state_code, state in Descriptor.as_items(activity.states):
        for source_code, source in Descriptor.as_items(state.sources):
            suffix = "_" + source_code
            if len(source.uids) == 1:
                # A single uid maps straight to the un-numbered expression name
                mappings[source.uids[0]] = prefix + state_code + suffix
            else:
                # Several uids are numbered, matching to_expressions above
                for idx, uid in enumerate(source.uids):
                    mappings[uid] = prefix + state_code + "_" + str(idx + 1) + suffix
    return mappings
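# --- Illustration (not from the original source) ---
# Same hypothetical fixture as in Example No. 1; to_mappings inverts the
# relationship, mapping each uid to its (numbered) expression name. It assumes
# Descriptor.as_items yields (field_name, value) pairs from namedtuple records.
from collections import namedtuple

Source = namedtuple("Source", ["uids"])
State = namedtuple("State", ["sources"])
activity = namedtuple("Activity", ["states"])(
    namedtuple("States", ["visits"])(            # "visits": made-up state code
        State(namedtuple("Sources", ["dhis2"])(  # "dhis2": made-up source code
            Source(["s4CxsmoqdRj", "fSD1ZZo4hTs"])))))

print(to_mappings(activity, "pills"))
# Expected, under the assumptions above:
# OrderedDict([('s4CxsmoqdRj', 'pills_visits_1_dhis2'),
#              ('fSD1ZZo4hTs', 'pills_visits_2_dhis2')])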
Example No. 3
import json

from mamba import description, it
from expects import expect, equal

from blsqpy.descriptor import Descriptor


def json_fixture_content(filename):
    print("reading", filename)
    with open(filename + ".json", encoding='utf-8') as f:
        return json.load(f)


with description('Parsing json config') as self:
    with it('loads them as namedtuples'):
        config = Descriptor.load("./specs/fixtures/config/demo")
        expect(config.demo.test.hello).to(equal("world"))

    with it('loads them as namedtuples and normalizes states'):
        config = Descriptor.load("./specs/fixtures/config/parametrized-raw")
        jsonDescriptor = Descriptor.to_json(config)
        print(jsonDescriptor)
        expect(json.loads(jsonDescriptor)).to(
            equal(
                json_fixture_content(
                    "./specs/fixtures/config/parametrized-final"))
        )
Example No. 4
    def dump_to_s3(self):
        for activity_code, activity in Descriptor.as_items(self.config.activities):
            # Activities flagged with the DISABLED_ prefix are skipped
            if activity_code.startswith("DISABLED_"):
                continue

            self.dump(activity, activity_code)
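# --- Illustration (not from the original source) ---
# The DISABLED_ prefix acts as a soft off-switch in the config: renaming an
# activity key disables its dump without deleting its definition.
activities = ["pills", "DISABLED_old_extract"]  # hypothetical activity codes
for code in activities:
    if code.startswith("DISABLED_"):
        continue
    print("would dump", code)  # prints only: would dump pills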
Example No. 5
        pd.testing.assert_frame_equal(
            df,
            expected_df)

    with it("Dhis2Dumper.dumps"):
        sqls = {
            "SELECT datavalue.value,\norganisationunit.path,\nperiod.startdate as start_date, period.enddate as end_date, lower(periodtype.name) as frequency,\ndataelement.uid AS dataelementid, dataelement.name AS dataelementname,\ncategoryoptioncombo.uid AS CatComboID , categoryoptioncombo.name AS CatComboName,\ndataelement.created,\norganisationunit.uid as uidorgunit\nFROM datavalue\nJOIN dataelement ON dataelement.dataelementid = datavalue.dataelementid\nJOIN categoryoptioncombo ON categoryoptioncombo.categoryoptioncomboid = datavalue.categoryoptioncomboid\nJOIN organisationunit ON organisationunit.organisationunitid = datavalue.sourceid\nJOIN period ON period.periodid = datavalue.periodid\nJOIN periodtype ON periodtype.periodtypeid = period.periodtypeid\nWHERE\n    organisationunit.path like '/%'\n          and ( dataelement.uid='s4CxsmoqdRj') OR ( dataelement.uid='fSD1ZZo4hTs' AND categoryoptioncombo.uid='HllvX50cXC0')\n -- query : extract_data":
            {
                "file": "datavalues",
                "parse_dates": ['start_date', 'end_date'],
            }
        }
        pg_hook = MockHook.with_extra_sqls(sqls)
        mock_s3 = MockS3Hook()

        config = Descriptor.load("./specs/fixtures/config/dump")
        dumper = Dhis2Dumper(config, mock_s3, "bucket", pg_hook=pg_hook)
        dumper.dump_to_s3()
        expect(mock_s3.uploads).to(equal(
            ['bucket/export/play/extract_data_values_play_pills-raw.csv',
             'bucket/export/play/extract_data_values_play_pills'
             ]))

    with it("Dhis2Dumper.dumps"):
        pg_hook = MockHook.with_extra_sqls({})
        mock_s3 = MockS3Hook()

        config = Descriptor.load("./specs/fixtures/config/dump")
        dumper = Dhis2Dumper(config, mock_s3, "bucket", pg_hook=pg_hook)
        dumper.dump_organisation_units_structure()
        expect(mock_s3.uploads).to(equal(
Example No. 6
import pandas as pd
from mamba import description, context, it, before
from expects import expect, equal

from blsqpy.descriptor import Descriptor
import blsqpy.mapping as mapping
from pandas.testing import assert_frame_equal
from collections import OrderedDict

config = Descriptor.load("./specs/fixtures/config/sample")


def test_from_rotated_to_mapped(config, rotated_csv, mapped_csv):
    """Map a rotated extract through the 'pills' activity and compare it to a fixture."""
    df = pd.read_csv("./specs/fixtures/extract/" + rotated_csv, sep=',')
    mapped_df = mapping.map_from_activity(df, config.activities.pills, "pills")
    print(df)
    print("*************** mapped_df")
    print(mapped_df)

    # mapped_df.to_csv("./specs/mapping/mapped.csv", sep=',')
    expected_mapped = pd.read_csv("./specs/fixtures/mapping/" + mapped_csv,
                                  sep=',')
    print("*************** expected_mapped")
    print(expected_mapped)

    assert_frame_equal(mapped_df.reset_index(drop=True),
                       expected_mapped,
                       check_dtype=False)


with description('mapping') as self:
Example No. 7
from blsqpy.descriptor import Descriptor


def to_data_elements(activity):
    """Collect every data element uid referenced by an activity's sources."""
    data_elements = []
    for _state_code, state in Descriptor.as_items(activity.states):
        for _source_code, source in Descriptor.as_items(state.sources):
            data_elements.extend(source.uids)
    return data_elements
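# --- Illustration (not from the original source) ---
# Same hypothetical fixture as in Example No. 1; to_data_elements simply
# flattens every source's uids into one list, again assuming
# Descriptor.as_items yields (field_name, value) pairs from namedtuple records.
from collections import namedtuple

Source = namedtuple("Source", ["uids"])
State = namedtuple("State", ["sources"])
activity = namedtuple("Activity", ["states"])(
    namedtuple("States", ["visits"])(
        State(namedtuple("Sources", ["dhis2"])(
            Source(["s4CxsmoqdRj", "fSD1ZZo4hTs"])))))

print(to_data_elements(activity))
# Expected, under the assumptions above: ['s4CxsmoqdRj', 'fSD1ZZo4hTs']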
Example No. 8
import code

from blsqpy.s3export_hooks import S3ExportsHook
from blsqpy.postgres_hook import PostgresHook
from blsqpy.dhis2 import Dhis2

import numpy as np
import pandas as pd

# Sample config usage

from blsqpy.descriptor import Descriptor
config = Descriptor.load("./specs/config/demo")
print("hello", config.demo.test.hello)


# S3: listing and reading
s3 = S3ExportsHook("s3_readonly")
exports = s3.exports()

print("Available extracts")
for val in exports:
    print(val["Key"], "\t", val["Size"], "\t", str(val["LastModified"]))

# S3: get a specific file and use it as a dataframe

s3 = S3ExportsHook("s3_readonly")
s3.download_file(
    "export/datavalues_extract_data_values_dhis2_sn_cartesanitaire_pilule.csv", "./tmp/pilule.csv")