def test_can_sum_from_joined_column_passes(self):
    """An aggregate over a column introduced by a prior join step works."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Join',
        query_func='join',
        query_kwargs=(
            '{"table_name":"dac1_current","schema_name":"public", '
            '"join_on":{"year":"year"},'
            '"columns_x":["donor_name","usd_commitment"],'
            '"columns_y":["part_code","part_name"]}'
        ),
        source_id=2,
    )
    OperationStep.objects.create(
        operation=self.op,
        step_id=3,
        name='Aggregate',
        query_func='aggregate',
        query_kwargs=(
            '{"group_by":["part_name"],"agg_func_name":"Sum", '
            '"operational_column":"usd_commitment"}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertGreater(rows[0]['usd_commitment_Sum'], 150)
def test_can_generate_select_all(self):
    """A bare operation with no extra steps selects every source column."""
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    # The crs_current table exposes 88 columns.
    self.assertEqual(len(rows[0].keys()), 88)
def create(self, validated_data):
    """Create an Operation (and its steps) from validated serializer data.

    Read-only fields (user, steps, tags, ...) are stripped from
    ``validated_data`` before the Operation row is created, then applied
    manually afterwards.  The operation query is derived either from the
    advanced config (when present) or from the attached steps.

    Returns:
        The saved Operation instance.

    Raises:
        AliasCreationError: when data-alias creation fails; re-raised with
            the operation's ``alias_creation_status`` as an error code.
        CustomAPIException: for any other unexpected failure (after it has
            been reported via ``handle_uncaught_error``).
    """
    try:
        read_only_fields = (
            'user', 'theme_name', 'tags', 'operationstep_set', 'review_set')
        # Strip read-only fields so Operation.objects.create only receives
        # writable model fields.
        read_only_dict = {
            field: validated_data.pop(field)
            for field in read_only_fields
            if field in validated_data
        }
        operation = Operation.objects.create(**validated_data)
        operation.user = read_only_dict['user']
        if operation.advanced_config:
            # Advanced config supersedes step-based query building.
            operation.operation_query = query.get_advanced_config_query(
                operation.advanced_config)
        else:
            for step in read_only_dict['operationstep_set']:
                OperationStep.objects.create(operation=operation, **step)
            operation.operation_query = query.build_query(operation=operation)
        operation.count_rows = True
        # Fix: idiomatic membership test ('x not in y', not 'not x in y').
        if 'is_draft' not in validated_data:
            operation.is_draft = False
        operation.save()
        self.create_operation_data_aliases(operation)
        return operation
    except AliasCreationError:
        # NOTE(review): assumes AliasCreationError is only raised by
        # create_operation_data_aliases, i.e. after `operation` exists —
        # otherwise this handler would hit an unbound name. Confirm.
        raise AliasCreationError(
            {'error_code': operation.alias_creation_status})
    except Exception as e:
        handle_uncaught_error(e)
        raise CustomAPIException({'detail': str(e)})
def update(self, instance, validated_data):
    """Update an Operation and reconcile its steps with the submitted set.

    Advanced-config operations are updated in place from the config and
    their query regenerated from it; step-based operations have their
    steps created/updated/deleted to match
    ``validated_data['operationstep_set']`` and the query rebuilt.

    Raises:
        AliasUpdateError: when refreshing the data aliases fails.
        CustomAPIException: for any other unexpected failure.
    """
    try:
        info = model_meta.get_field_info(instance)
        advanced_config = validated_data.get('advanced_config', None)
        if advanced_config and len(advanced_config) > 0:
            # Advanced-config path: only scalar fields are updated and the
            # query comes straight from the config.
            instance.name = validated_data.get('name')
            instance.description = validated_data.get('description')
            instance.is_draft = validated_data.get('is_draft')
            instance.advanced_config = advanced_config
            instance.operation_query = query.get_advanced_config_query(
                advanced_config)
            instance.save()
        else:
            updated_steps = validated_data.pop('operationstep_set')
            # Copy the remaining writable fields onto the instance;
            # to-many relations need .set() rather than setattr().
            for attr, value in validated_data.items():
                if attr in info.relations and info.relations[attr].to_many:
                    field = getattr(instance, attr)
                    field.set(value)
                else:
                    setattr(instance, attr, value)
            instance.save()
            # Collect existing step ids; any id still in this list after
            # processing the submitted steps belongs to a deleted step.
            existing_steps = instance.operationstep_set.all()
            existing_step_ids = [step.step_id for step in existing_steps]
            for updated_step in updated_steps:
                updated_step_id = updated_step.get("step_id")
                if updated_step_id in existing_step_ids:
                    existing_step_ids.remove(updated_step_id)
                # Upsert the step row for this (operation, step_id) pair.
                updated_step_instance, _ = OperationStep.objects.get_or_create(
                    operation=instance, step_id=updated_step_id)
                step_info = model_meta.get_field_info(
                    updated_step_instance)
                for attr, value in updated_step.items():
                    if attr in step_info.relations and step_info.relations[
                            attr].to_many:
                        field = getattr(updated_step_instance, attr)
                        field.set(value)
                    else:
                        setattr(updated_step_instance, attr, value)
                updated_step_instance.save()
            # Remove steps that were dropped from the submitted set.
            for step_for_delete_id in existing_step_ids:
                step_for_delete = OperationStep.objects.get(
                    operation=instance, step_id=step_for_delete_id)
                step_for_delete.delete()
            instance.operation_query = query.build_query(
                operation=instance)
            instance.count_rows = True
            instance.save()
        self.update_operation_data_aliases(instance)
        return instance
    except AliasUpdateError:
        raise AliasUpdateError(
            {'error_code': instance.alias_creation_status})
    except Exception as e:
        handle_uncaught_error(e)
        raise CustomAPIException({'detail': str(e)})
def test_can_generate_filter(self):
    """Filter steps (ge + text_search) narrow the result set as expected."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name="Filter",
        query_func="filter",
        query_kwargs=(
            '{"filters":[{"field":"year", "value":1973, "func":"ge"},'
            '{"field":"short_description", "value":"%sector%|%wheat%", '
            '"func":"text_search"}]}'
        ),
        source_id=1,
    )
    built = query.build_query(operation=self.op)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(len(rows), 41)
def test_can_perform_multi_transform_divide_by_zero(self):
    """A divide multi-transform survives zero denominators in the data."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Transform',
        query_func='multi_transform',
        query_kwargs=(
            '{"trans_func_name":"divide", '
            '"operational_columns":["usd_disbursement","usd_disbursement_deflated"]}'
        ),
        source_id=2,
    )
    built = query.build_query(operation=self.op)
    _, rows = fetch_data(built, database="default")
    self.assertIn('usd_disbursement_divide', rows[0].keys())
def test_can_generate_select_by_column(self):
    """A select step restricts the output to the named columns only."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name="Select",
        query_func="select",
        query_kwargs='{ "columns": [ "year" ] }',
        source_id=1,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(len(rows[0].keys()), 1)
def test_can_catch_sql_err(self):
    """Selecting a non-existent column surfaces an error entry, not a crash."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name="Select",
        query_func="select",
        query_kwargs='{ "columns": [ "iso10" ] }',
        source_id=1,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertIn("error", list(rows[0].keys()))
def test_can_generate_join(self):
    """A join step merges the columns of both tables into the result."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Join',
        query_func='join',
        query_kwargs=(
            '{"table_name":"dac1_current","schema_name":"public", '
            '"join_on":{"year":"year"}}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(len(rows[0].keys()), 100)
def test_can_catch_zip_err(self):
    """A filter matching nothing yields an empty result set, not an error."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name="Filter",
        query_func="filter",
        query_kwargs='{"filters":[{"field":"year", "value":9999, "func":"ge"}]}',
        source_id=1,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(len(rows), 0)
def test_can_sum(self):
    """A Sum aggregate adds a '<column>_Sum' key to each result row."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Aggregate',
        query_func='aggregate',
        query_kwargs=(
            '{"group_by":["donor_code"],"agg_func_name":"Sum", '
            '"operational_column":"usd_commitment"}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertIn('usd_commitment_Sum', rows[0].keys())
def test_can_generate_perform_dense_rank(self):
    """A DenseRank window function starts ranking at 1."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Window denserank',
        query_func='window',
        query_kwargs='{"window_fn":"DenseRank","order_by":["usd_commitment"]}',
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(rows[0]["dense_rank"], 1)
def test_can_perform_last_value(self):
    """A LastValue window function produces a plausible 'last_value' column."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Window last val',
        query_func='window',
        query_kwargs=(
            '{"window_fn":"LastValue","term":"usd_commitment","order_by":["year"]}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertGreater(rows[0]['last_value'], 14)
def test_can_perform_scalar_transform(self):
    """A text_search scalar transform adds a truthy match-flag column."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Transform',
        query_func='scalar_transform',
        query_kwargs=(
            '{"trans_func_name":"text_search", '
            '"operational_column":"short_description", '
            '"operational_value":"%wheat%"}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertTrue(rows[0]['short_description_text_search'])
def test_can_generate_avg_aggregate(self):
    """An Avg aggregate grouped by year produces a plausible average."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Aggregate',
        query_func='aggregate',
        query_kwargs=(
            '{"group_by":["year"],"agg_func_name":"Avg", '
            '"operational_column":"usd_commitment"}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertGreater(rows[0]["usd_commitment_Avg"], 9)
def test_can_perform_stddev(self):
    """A StdDev window function partitioned over year yields a 'stddev' column."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Window StdDev',
        query_func='window',
        query_kwargs=(
            '{"window_fn":"StdDev","term":"usd_commitment","over":["year"]}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertGreater(rows[0]['stddev'], 8)
def test_can_generate_join_for_specific_columns(self):
    """A join restricted to named columns adds only those to the output."""
    OperationStep.objects.create(
        operation=self.op,
        step_id=2,
        name='Join',
        query_func='join',
        query_kwargs=(
            '{"table_name":"dac1_current","schema_name":"public", '
            '"join_on":{"year":"year"},'
            '"columns_x":["donor_name","usd_commitment"],'
            '"columns_y":["part_code","part_name"]}'
        ),
        source_id=2,
    )
    built = query.build_query(
        operation=self.op, limit=1, offset=0, estimate_count=True)
    _, rows = fetch_data(built, database="default")
    self.assertEqual(len(rows[0].keys()), 90)
def test_can_generate_select_with_defined_limit(self):
    """Explicit limit/offset values appear in the generated SQL."""
    expected = 'SELECT * FROM "public"."crs_current" LIMIT 5 OFFSET 10'
    built = query.build_query(operation=self.op, limit=5, offset=10)
    self.assertEqual(built[1], expected)
def __init__(self, operation, frozen_table_id=None):
    """Store *operation* and precompute the SQL string of its main query."""
    if operation.advanced_config:
        # Advanced-config operations derive their SQL from the stored config.
        built = query.build_advanced_queries(operation.advanced_config)
    else:
        built = query.build_query(
            operation=operation, frozen_table_id=frozen_table_id)
    # Both builders return a sequence whose second element is the main query.
    self.main_query = built[1]
    self.operation = operation
def test_can_generate_select_without_limit(self):
    """Without a limit, the generated SQL omits LIMIT/OFFSET entirely."""
    expected = 'SELECT * FROM "public"."crs_current"'
    built = query.build_query(operation=self.op, offset=10)
    self.assertEqual(built[1], expected)