from mamba import description, context, it
from expects import expect, equal
import json

from blsqpy.descriptor import Descriptor


def json_fixture_content(filename):
    """Load and parse the JSON fixture stored at `filename` + ".json"."""
    print("reading", filename)
    with open(filename+".json", encoding='utf-8') as f:
        # json.load reads the stream directly; json.loads(f.read()) was redundant.
        return json.load(f)


with description('Parsing json config') as self:
    with it('loads them as namedTupples'):
        # A loaded descriptor exposes the JSON tree as attribute access.
        config = Descriptor.load("./specs/fixtures/config/demo")
        expect(config.demo.test.hello).to(equal("world"))

    with it('loads them as namedTupples and normalize states'):
        # Round-trip: raw fixture -> descriptor -> JSON should equal the
        # normalized ("final") fixture.
        config = Descriptor.load("./specs/fixtures/config/parametrized-raw")
        jsonDescriptor = Descriptor.to_json(config)
        print(jsonDescriptor)
        expect(json.loads(jsonDescriptor)).to(
            equal(
                json_fixture_content(
                    "./specs/fixtures/config/parametrized-final"))
        )
import pandas as pd
from mamba import description, context, it, before
from expects import expect, equal
from blsqpy.descriptor import Descriptor
import blsqpy.mapping as mapping
from pandas.testing import assert_frame_equal
from collections import OrderedDict

# Shared fixture: parsed sample config used by the mapping specs below.
config = Descriptor.load("./specs/fixtures/config/sample")


def test_from_rotated_to_mapped(config, rotated_csv, mapped_csv):
    # Run a "rotated" extract CSV through the pills activity mapping and
    # compare the result against the expected mapped CSV fixture.
    # NOTE(review): the `config` parameter shadows the module-level `config`;
    # callers are expected to pass a loaded Descriptor — confirm at call sites.
    df = pd.read_csv("./specs/fixtures/extract/" + rotated_csv, sep=',')
    mapped_df = mapping.map_from_activity(df, config.activities.pills, "pills")
    print(df)
    print("*************** mapped_df")
    print(mapped_df)
    # Kept for regenerating the fixture by hand when the mapping changes:
    #mapped_df.to_csv("./specs/mapping/mapped.csv", sep=',')
    expected_mapped = pd.read_csv("./specs/fixtures/mapping/" + mapped_csv, sep=',')
    print("*************** expected_mapped")
    print(expected_mapped)
    # check_dtype=False: CSV round-trips may change int/float dtypes; the
    # index is reset so positional row labels don't affect equality.
    assert_frame_equal(mapped_df.reset_index(drop=True),
                       expected_mapped,
                       check_dtype=False)


# NOTE(review): the body of this description block continues beyond this chunk.
with description('mapping') as self:
        # NOTE(review): this chunk starts mid-spec; `df` and `expected_df` are
        # defined in preceding lines outside this view.
        pd.testing.assert_frame_equal(
            df, expected_df)

    with it("Dhis2Dumper.dumps"):
        # Map of the exact SQL the dumper is expected to issue to the
        # file name / read options the mocked PostgresHook answers with.
        sqls = {
            "SELECT datavalue.value,\norganisationunit.path,\nperiod.startdate as start_date, period.enddate as end_date, lower(periodtype.name) as frequency,\ndataelement.uid AS dataelementid, dataelement.name AS dataelementname,\ncategoryoptioncombo.uid AS CatComboID , categoryoptioncombo.name AS CatComboName,\ndataelement.created,\norganisationunit.uid as uidorgunit\nFROM datavalue\nJOIN dataelement ON dataelement.dataelementid = datavalue.dataelementid\nJOIN categoryoptioncombo ON categoryoptioncombo.categoryoptioncomboid = datavalue.categoryoptioncomboid\nJOIN organisationunit ON organisationunit.organisationunitid = datavalue.sourceid\nJOIN period ON period.periodid = datavalue.periodid\nJOIN periodtype ON periodtype.periodtypeid = period.periodtypeid\nWHERE\n organisationunit.path like '/%'\n and ( dataelement.uid='s4CxsmoqdRj') OR ( dataelement.uid='fSD1ZZo4hTs' AND categoryoptioncombo.uid='HllvX50cXC0')\n -- query : extract_data": {
                "file": "datavalues",
                "parse_dates": ['start_date', 'end_date'],
            }
        }
        pg_hook = MockHook.with_extra_sqls(sqls)
        mock_s3 = MockS3Hook()
        config = Descriptor.load("./specs/fixtures/config/dump")
        dumper = Dhis2Dumper(config, mock_s3, "bucket", pg_hook=pg_hook)
        dumper.dump_to_s3()
        # The dumper uploads both the raw and the processed extract.
        expect(mock_s3.uploads).to(equal(
            ['bucket/export/play/extract_data_values_play_pills-raw.csv',
             'bucket/export/play/extract_data_values_play_pills'
             ]))

    with it("Dhis2Dumper.dumps"):
        # NOTE(review): same spec name as the block above — presumably meant
        # to read "dumps organisation units"; confirm before renaming.
        pg_hook = MockHook.with_extra_sqls({})
        mock_s3 = MockS3Hook()
        config = Descriptor.load("./specs/fixtures/config/dump")
        dumper = Dhis2Dumper(config, mock_s3, "bucket", pg_hook=pg_hook)
        dumper.dump_organisation_units_structure()
        # NOTE(review): statement truncated — the chunk ends mid-expression.
        expect(mock_s3.uploads).to(equal(
# Demo script: load a sample config, list available S3 exports, and download
# one extract for local use as a dataframe.
import code

from blsqpy.s3export_hooks import S3ExportsHook
from blsqpy.postgres_hook import PostgresHook
from blsqpy.dhis2 import Dhis2
import numpy as np
import pandas as pd

# Sample config usage
from blsqpy.descriptor import Descriptor

config = Descriptor.load("./specs/config/demo")
print("hello", config.demo.test.hello)

# S3 : listing and reading
s3 = S3ExportsHook("s3_readonly")
exports = s3.exports()
print("Available extracts")
for val in exports:
    print(val["Key"], "\t", val["Size"], "\t", str(val["LastModified"]))

# S3 get a specific file and use it as a dataframe
# (reuse the hook created above — the original instantiated a second,
# identical S3ExportsHook("s3_readonly") for no reason; `import code` was
# also duplicated and has been kept only once)
s3.download_file(
    "export/datavalues_extract_data_values_dhis2_sn_cartesanitaire_pilule.csv",
    "./tmp/pilule.csv")