    def _update_package_with_errors(self, fixture_name, changeset):
 
        pkg_dict = package_fixtures[fixture_name]['0']
        wrong_pkg_dict = package_fixtures[fixture_name][changeset]
        pkg_name = package_fixtures[fixture_name]['0']['name']
        
        res1 = self.app.get('/dataset/edit/%s' % pkg_name)
        assert res1.status == 200
        
        key_prefix = dt = package_fixtures[fixture_name]['0']['dataset_type']

        # Edit core metadata
        
        form1 = res1.forms['package-form']
        
        for k in self.basic_fields & set(wrong_pkg_dict.keys()):
            v = pkg_dict[k]
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        if 'tags' in wrong_pkg_dict:
            form1.set('tag_string', ','.join(map(lambda t: t['name'], wrong_pkg_dict['tags'] or [])))
        
        # Edit dataset_type-related metadata

        for t, v in flatten(wrong_pkg_dict[dt]).items():
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        
        # Submit invalid data

        res1s = form1.submit('save', status='*')
        # Note a validation error leads to an HTTP 200 and the form is re-rendered
        assert res1s.status in [200]
        
        pq1s = pyquery.PyQuery(res1s.body.decode('utf-8'))
        pq1s_errors = pq1s.find('.errors.error-messages')
        assert pq1s_errors
        error_keys = [el.attrib['data-key'] for el in pq1s_errors.find("li .title")]
        for t in flatten(wrong_pkg_dict[dt]):
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            assert k in error_keys

        # Fill the correct values and re-submit

        form2 = res1.forms['package-form']
       
        for t, v in flatten(pkg_dict[dt]).items():
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            log1.info('form2(2): Setting %r to (correct) value %r', k, v)
            form2.set(k, v)

        res2s = form2.submit('save', status='*')
        assert res2s.status in [301, 302]
        
        res2 = res2s.follow()
        assert res2.status in [200]      
        assert res2.request.url == '/dataset/%s' %(pkg_name)
def preprocess_dataset_for_edit(key, data, errors, context):
    assert key[0] == '__before', \
        'This validator can only be invoked in the __before stage'
    
    def debug(msg):
        logger.debug('Pre-processing dataset for editing: %s' %(msg))
    
    received_data = { k:v for k,v in data.iteritems() if not (v is missing) }
    unexpected_data = received_data.get(('__extras',), {})
    
    #debug('Received data: %r' %(received_data))
    #debug('Received (but unexpected) data: %r' %(unexpected_data))
    
    # Figure out if a nested dict is supplied (instead of a flat one).
    
    # Note This "nested" input format is intended to be used by the action api,
    # as it is far more natural to the JSON format. Still, this format option is
    # not restricted to api requests (it is possible to be used even by form-based
    # requests).
    
    key_prefix = dtype = received_data.get(('dataset_type',))
    r = unexpected_data.get(dtype) if dtype else None
    if isinstance(r, dict) and (dtype in ext_metadata.dataset_types):
        # Looks like a nested dict keyed at key_prefix
        debug('Trying to flatten input at %s' %(key_prefix))
        if any([ k[0].startswith(key_prefix) for k in received_data ]):
            raise Invalid('Not supported: Found both nested/flat dicts')
        # Convert to expected flat fields
        key_converter = lambda k: '.'.join([key_prefix] + map(str, k))
        r = dictization.flatten(r, key_converter)
        data.update({ (k,): v for k, v in r.iteritems() })

    #raise Breakpoint('preprocess_dataset_for_edit')
    pass
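
# For reference, the nested-to-flat conversion performed above can be sketched
# with a small standalone helper. This only illustrates what dictization.flatten
# produces with the dotted key_converter used in the validator; it is not the
# extension's own implementation, and the field names in the usage note below
# are made up.

def _flatten_nested(d, key_prefix, _path=()):
    '''Yield ('<prefix>.<dotted.path>', value) pairs for a nested dict.'''
    for k, v in d.items():
        p = _path + (str(k),)
        if isinstance(v, dict):
            for pair in _flatten_nested(v, key_prefix, p):
                yield pair
        else:
            yield ('.'.join((key_prefix,) + p), v)

# dict(_flatten_nested({'contact': {'email': u'x@example.com'}}, 'foo'))
# => {'foo.contact.email': u'x@example.com'}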
Example #3
def preprocess_dataset_for_edit(key, data, errors, context):
    assert key[0] == '__before', \
        'This validator can only be invoked in the __before stage'

    def debug(msg):
        logger.debug('Pre-processing dataset for editing: %s' % (msg))

    received_data = {k: v for k, v in data.iteritems() if not (v is missing)}
    unexpected_data = received_data.get(('__extras', ), {})

    #debug('Received data: %r' %(received_data))
    #debug('Received (but unexpected) data: %r' %(unexpected_data))

    # Figure out if a nested dict is supplied (instead of a flat one).

    # Note This "nested" input format is intended to be used by the action api,
    # as it is far more natural to the JSON format. Still, this format option is
    # not restricted to api requests (it is possible to be used even by form-based
    # requests).

    key_prefix = dtype = received_data.get(('dataset_type', ))
    r = unexpected_data.get(dtype) if dtype else None
    if isinstance(r, dict) and (dtype in ext_metadata.dataset_types):
        # Looks like a nested dict keyed at key_prefix
        debug('Trying to flatten input at %s' % (key_prefix))
        if any([k[0].startswith(key_prefix) for k in received_data]):
            raise Invalid('Not supported: Found both nested/flat dicts')
        # Convert to expected flat fields
        key_converter = lambda k: '.'.join([key_prefix] + map(str, k))
        r = dictization.flatten(r, key_converter)
        data.update({(k, ): v for k, v in r.iteritems()})

    #raise Breakpoint('preprocess_dataset_for_edit')
    pass
def _test_dictize_update_shallow(fixture_name, dtype, changeset):
    '''Test from_dict in shallow-update mode
    ''' 
    
    x0 = getattr(fixtures, fixture_name)
    df0 = x0.to_dict(flat=1, opts={'serialize-keys': 1})
    
    d = updates[dtype][changeset]
    df = flatten(d, lambda k: '.'.join(map(str, k)))
    
    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={ 'update': True })
    df1 = x1.to_dict(flat=1, opts={'serialize-keys': 1})

    for k in (set(x0.get_fields()) - set(d.keys())):
        assert_equal(getattr(x1,k), getattr(x0,k))
 
    for change, key, desc in dictdiffer.diff(df0, df1):
        if change == 'change':
            val0, val1 = desc
            assert ((val1 is None) and not (key in df)) or df[key] == val1
            assert df1[key] == val1
            assert df0[key] == val0
        elif change == 'add':
            for key1, val1 in desc:
                assert ((val1 is None) and not (key1 in df)) or df[key1] == val1
                assert df1[key1] == val1
                assert not key1 in df0
        elif change == 'remove':
            for key0, val0 in desc:
                assert df0[key0] == val0
                assert not key0 in df1
                assert not (key0 in df) or (df[key0] is None) 

    pass
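
# The assertions above rely on the tuple format yielded by dictdiffer.diff:
# ('change', key, (old, new)) for changed values, and ('add'/'remove',
# parent_key, [(key, value), ...]) for added or removed entries. A quick
# illustration (the ordering of the yielded items may differ between versions):
#
#   import dictdiffer
#   list(dictdiffer.diff({'a': 1, 'b': 2}, {'a': 1, 'b': 3, 'c': 4}))
#   => [('change', 'b', (2, 3)), ('add', '', [('c', 4)])]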
Example #5
def _test_dictize_update_deep(fixture_name, dtype, changeset):
    '''Test from_dict in deep-update mode
    '''

    x0 = getattr(fixtures, fixture_name)
    df0 = x0.to_dict(flat=1, opts={'serialize-keys': 1})

    d = updates[dtype][changeset]
    df = flatten(d, lambda k: '.'.join(map(str, k)))

    x2 = copy.deepcopy(x0)
    x2.from_dict(d, is_flat=0, opts={'update': 'deep'})

    for k in (set(x0.get_fields()) - set(d.keys())):
        assert_equal(getattr(x2, k), getattr(x0, k))

    df2 = x2.to_dict(flat=1, opts={'serialize-keys': 1})

    def is_reloaded(k):
        # Check if a None was replaced (in d) with a non-empty thing
        if (df0[k] is None) and dot_lookup(d, k):
            return True
        # Check if is forced to be reloaded via its parent
        kp = k.split('.')[:-1]
        while kp:
            f = x0.get_field(kp)
            if not f.queryTaggedValue('allow-partial-update', True):
                return True
            kp.pop()
        return False

    for change, key, desc in dictdiffer.diff(df0, df2):
        if change == 'change':
            val0, val2 = desc
            assert (val2 is None and not (key in df)) or (val2 == df[key])
            assert val0 == df0[key]
            assert val2 == df2[key]
        elif change == 'add':
            for key2, val2 in desc:
                assert (val2 is None) or (key2 in df)
                assert df2[key2] == val2
        elif change == 'remove':
            for key0, val0 in desc:
                # A key may be removed in the following cases
                #  - by setting its update to None (df value)
                #  - an ancestor or self was fully reloaded
                assert ((key0 in df) and
                        (df[key0] is None)) or is_reloaded(key0)
                assert df0[key0] == val0
    pass
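
# For context, the difference between the two update modes exercised by these
# tests can be summarised with a plain-dict sketch. This shows the intended
# semantics only; the real from_dict works on metadata objects and also honours
# the 'allow-partial-update' tagged value checked in is_reloaded above.

def _shallow_update(target, changes):
    '''Shallow: a key present in changes replaces the target value wholesale.'''
    out = dict(target)
    out.update(changes)
    return out

def _deep_update(target, changes):
    '''Deep: nested dicts are merged key-by-key instead of being replaced.'''
    out = dict(target)
    for k, v in changes.items():
        if isinstance(v, dict) and isinstance(out.get(k), dict):
            out[k] = _deep_update(out[k], v)
        else:
            out[k] = v
    return out

# _shallow_update({'a': {'x': 1, 'y': 2}}, {'a': {'x': 9}}) => {'a': {'x': 9}}
# _deep_update({'a': {'x': 1, 'y': 2}}, {'a': {'x': 9}})    => {'a': {'x': 9, 'y': 2}}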
def _test_dictize_update_discard_junk(fixture_name, dtype, changeset):
    
    key_prefix = 'fooo'

    x0 = getattr(fixtures, fixture_name)
    assert isinstance(x0, types.FooMetadata)
    d = updates[dtype][changeset]

    # Prepend a key-prefix, and then insert some junk items into df
    
    df = flatten(d, lambda k: '.'.join(map(str, k)))

    af = []
    af.extend([ ('%s.%s' % (key_prefix, k), v) for k, v in df.items() ])
    af.extend([ ('a', 99), ('junk.1', 'something'), ('z.aa', 100), (key_prefix, 'baobab') ])
    df = dict(af)

    # Load   
    
    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={})

    x2 = copy.deepcopy(x0)
    x2.from_dict(df, is_flat=1, opts={
        'unserialize-keys': 1, 'key-prefix': key_prefix })
    
    assert x1 == x2
    
    # Update in shallow mode 
    
    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={ 'update': 1 })

    x2 = copy.deepcopy(x0)
    x2.from_dict(df, is_flat=1, opts={
        'update': 1, 'unserialize-keys': 1, 'key-prefix': key_prefix })
    
    assert x1 == x2
   
    # Update in deep mode 
    
    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={ 'update': 'deep' })

    x2 = copy.deepcopy(x0)
    x2.from_dict(df, is_flat=1, opts={
        'update': 'deep', 'unserialize-keys': 1, 'key-prefix': key_prefix })
 
    assert x1 == x2
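
# The flat dict fed to from_dict in the test above looks roughly like the
# following (the field names are hypothetical): only keys under the 'fooo.'
# prefix should be considered, while the unprefixed items and the bare prefix
# key are junk that loading is expected to discard.
#
#   df = {
#       'fooo.title': u'A test record',          # kept: prefixed field path
#       'fooo.contact.email': u'x@example.com',  # kept
#       'a': 99,                                 # junk: no prefix
#       'junk.1': 'something',                   # junk: unknown namespace
#       'z.aa': 100,                             # junk
#       'fooo': 'baobab',                        # junk: prefix without a field path
#   }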
def _test_dictize_update_deep(fixture_name, dtype, changeset):
    '''Test from_dict in deep-update mode
    ''' 
    
    x0 = getattr(fixtures, fixture_name)
    df0 = x0.to_dict(flat=1, opts={'serialize-keys': 1})
    
    d = updates[dtype][changeset]
    df = flatten(d, lambda k: '.'.join(map(str, k)))
     
    x2 = copy.deepcopy(x0)
    x2.from_dict(d, is_flat=0, opts={ 'update': 'deep' })
    
    for k in (set(x0.get_fields()) - set(d.keys())):
        assert_equal(getattr(x2,k), getattr(x0,k))
   
    df2 = x2.to_dict(flat=1, opts={'serialize-keys': 1})

    def is_reloaded(k):
        # Check if a None was replaced (in d) with a non-empty thing
        if (df0[k] is None) and dot_lookup(d, k):
            return True
        # Check if is forced to be reloaded via its parent
        kp = k.split('.')[:-1]
        while kp:
            f = x0.get_field(kp)
            if not f.queryTaggedValue('allow-partial-update', True):
                return True
            kp.pop()
        return False

    for change, key, desc in dictdiffer.diff(df0, df2):
        if change == 'change':
            val0, val2 = desc
            assert (val2 is None and not (key in df)) or (val2 == df[key])
            assert val0 == df0[key]
            assert val2 == df2[key]
        elif change == 'add':
            for key2, val2 in desc:
                assert (val2 is None) or (key2 in df)
                assert df2[key2] == val2
        elif change == 'remove':
            for key0, val0 in desc:
                # A key may be removed in the following cases
                #  - by setting its update to None (df value)
                #  - an ancestor or self was fully reloaded
                assert ((key0 in df) and (df[key0] is None)) or is_reloaded(key0)
                assert df0[key0] == val0
    pass
    def _update_package(self, fixture_name, changeset):

        assert fixture_name in package_fixtures
        pkg_dict = package_fixtures[fixture_name][changeset]
        pkg_name = package_fixtures[fixture_name]['0']['name']

        res1 = self.app.get('/dataset/edit/%s' % pkg_name)
        assert res1.status == 200

        dt = package_fixtures[fixture_name]['0']['dataset_type']
        dt_spec = dataset_types[dt]
        key_prefix = dt_spec.get('key_prefix', dt)

        # Edit core metadata

        form1 = res1.forms['package-form']

        for k in self.basic_fields & set(pkg_dict.keys()):
            v = pkg_dict[k]
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        if 'tags' in pkg_dict:
            form1.set(
                'tag_string',
                ','.join(map(lambda t: t['name'], pkg_dict['tags'] or [])))

        # Edit dataset_type-related metadata

        for t, v in flatten(pkg_dict.get(dt)).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)

        # Submit

        res1s = form1.submit('save', status='*')
        assert res1s.status in [301, 302]

        res2 = res1s.follow()
        assert res2.status in [200]
        assert res2.request.url == '/dataset/%s' % (pkg_name)
    def _update_package(self, fixture_name, changeset):
 
        assert fixture_name in package_fixtures
        pkg_dict = package_fixtures[fixture_name][changeset]
        pkg_name = package_fixtures[fixture_name]['0']['name']
        
        res1 = self.app.get('/dataset/edit/%s' % pkg_name)
        assert res1.status == 200
        
        key_prefix = dt = package_fixtures[fixture_name]['0']['dataset_type']

        # Edit core metadata
        
        form1 = res1.forms['package-form']

        for k in self.basic_fields & set(pkg_dict.keys()):
            v = pkg_dict[k]
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        if 'tags' in pkg_dict:
            form1.set('tag_string', ','.join(map(lambda t: t['name'], pkg_dict['tags'] or [])))
        
        # Edit dataset_type-related metadata

        for t, v in flatten(pkg_dict.get(dt)).items():
            k = '.'.join((key_prefix,) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        
        # Submit

        res1s = form1.submit('save', status='*')
        assert res1s.status in [301, 302]
        
        res2 = res1s.follow()
        assert res2.status in [200]      
        assert res2.request.url == '/dataset/%s' %(pkg_name)
Example #10
def _test_dictize_update_shallow(fixture_name, dtype, changeset):
    '''Test from_dict in shallow-update mode
    '''

    x0 = getattr(fixtures, fixture_name)
    df0 = x0.to_dict(flat=1, opts={'serialize-keys': 1})

    d = updates[dtype][changeset]
    df = flatten(d, lambda k: '.'.join(map(str, k)))

    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={'update': True})
    df1 = x1.to_dict(flat=1, opts={'serialize-keys': 1})

    for k in (set(x0.get_fields()) - set(d.keys())):
        assert_equal(getattr(x1, k), getattr(x0, k))

    for change, key, desc in dictdiffer.diff(df0, df1):
        if change == 'change':
            val0, val1 = desc
            assert ((val1 is None) and not (key in df)) or df[key] == val1
            assert df1[key] == val1
            assert df0[key] == val0
        elif change == 'add':
            for key1, val1 in desc:
                assert ((val1 is None)
                        and not (key1 in df)) or df[key1] == val1
                assert df1[key1] == val1
                assert not key1 in df0
        elif change == 'remove':
            for key0, val0 in desc:
                assert df0[key0] == val0
                assert not key0 in df1
                assert not (key0 in df) or (df[key0] is None)

    pass
Example #11
        u'created': u'12-Dec-2014',
        u'samples': {
            'a': [ 1.4 ,7.6, 9.7, 5.9, 5.0, 9.1, 11.3, ],
            'b': [ 4.9 ],
            'c': {
                # Here, unflatten() should detect a list 
                u'0': 99,
                u'1': 100,
                u'2': 199,
            },
        },
    },
    u'author': u'lalakis',
}

d1 = dictization.flatten(d)

d2 = dictization.unflatten(d1)

def test_flattened_1():
    for k in sorted(d1):
        v = d1.get(k)
        assert isinstance(v, basestring) or isinstance(v, float) or isinstance(v, int), \
            '%r is not scalar' %(v)

@raises(AssertionError)
def test_inversed_1():
    s0 = json.dumps(d)
    s2 = json.dumps(d2)
    # Should fail because d[u'measurements'][u'samples']['c'] is converted to a list
    eq_(s0, s2)
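
# The expected failure comes from the lossy round trip: flatten() serialises
# list indices and dict keys the same way, so unflatten() has to guess, and
# when every key under a node is a consecutive integer (as for 'c' above) it
# rebuilds a list. Roughly, with dotted string keys shown for readability (the
# module may represent flattened keys as tuples):
#
#   {'c': {u'0': 99, u'1': 100, u'2': 199}}  --flatten-->   {'c.0': 99, 'c.1': 100, 'c.2': 199}
#   {'c.0': 99, 'c.1': 100, 'c.2': 199}      --unflatten--> {'c': [99, 100, 199]}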
    def _update_package_with_errors(self, fixture_name, changeset):

        pkg_dict = package_fixtures[fixture_name]['0']
        wrong_pkg_dict = package_fixtures[fixture_name][changeset]
        pkg_name = package_fixtures[fixture_name]['0']['name']

        res1 = self.app.get('/dataset/edit/%s' % pkg_name)
        assert res1.status == 200

        dt = package_fixtures[fixture_name]['0']['dataset_type']
        dt_spec = dataset_types[dt]
        key_prefix = dt_spec.get('key_prefix', dt)

        # Edit core metadata

        form1 = res1.forms['package-form']

        for k in self.basic_fields & set(wrong_pkg_dict.keys()):
            v = pkg_dict[k]
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        if 'tags' in wrong_pkg_dict:
            form1.set(
                'tag_string',
                ','.join(map(lambda t: t['name'], wrong_pkg_dict['tags']
                             or [])))

        # Edit dataset_type-related metadata

        for t, v in flatten(wrong_pkg_dict[dt]).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)

        # Submit invalid data

        res1s = form1.submit('save', status='*')
        # Note a validation error leads to an HTTP 200 and the form is re-rendered
        assert res1s.status in [200]

        pq1s = pyquery.PyQuery(res1s.body.decode('utf-8'))
        pq1s_errors = pq1s.find('.errors.error-messages')
        assert pq1s_errors
        error_keys = [
            el.attrib['data-key'] for el in pq1s_errors.find("li .title")
        ]
        for t in flatten(wrong_pkg_dict[dt]):
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            assert k in error_keys

        # Fill the correct values and re-submit

        form2 = res1.forms['package-form']

        for t, v in flatten(pkg_dict[dt]).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            log1.info('form2(2): Setting %r to (correct) value %r', k, v)
            form2.set(k, v)

        res2s = form2.submit('save', status='*')
        assert res2s.status in [301, 302]

        res2 = res2s.follow()
        assert res2.status in [200]
        assert res2.request.url == '/dataset/%s' % (pkg_name)
Example #13
    def flatten_errors(self, errors):
        ''' Convert an <errors> structure to a flattened dict '''
        error_dict = self._dictize_errors(errors)
        return dictization.flatten(error_dict)
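
    # A rough idea of the transformation, with a hypothetical error structure;
    # the exact intermediate produced by _dictize_errors and the key
    # representation chosen by dictization.flatten may differ.
    #
    #   errors, as produced by validation, nest per field:
    #     {'title': [u'Missing value'],
    #      'foo': {'contact': {'email': [u'Invalid email']}}}
    #   after dictization/flattening, each message is reachable by a flat key path:
    #     {('title',): [u'Missing value'],
    #      ('foo', 'contact', 'email'): [u'Invalid email']}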
    def _create_package_with_errors(self, fixture_name, changeset):

        pkg_dict = package_fixtures[fixture_name]['0']
        wrong_pkg_dict = package_fixtures[fixture_name][changeset]
        pkg_name = pkg_dict['name']

        res1 = self.app.get('/dataset/new', status='*')
        assert res1.status == 200

        dt = pkg_dict['dataset_type']
        dt_spec = dataset_types[dt]
        key_prefix = dt_spec.get('key_prefix', dt)

        # 1st stage

        form1 = res1.forms['package-form']

        for k in ['title', 'name', 'dataset_type', 'notes', 'license_id']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        form1.set('tag_string',
                  ','.join(map(lambda t: t['name'], pkg_dict.get('tags', []))))

        res1s = form1.submit('save')
        assert res1s.status in [301, 302]

        # 2nd stage

        res2 = res1s.follow()

        resource_dict = next(iter(pkg_dict['resources']))  # 1st resource
        form2 = res2.forms['resource-form']
        for k in ['description', 'url', 'name', 'format']:
            v = resource_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form2.set(k, v)

        btns = form2.fields.get('save')
        i2 = next(j for j, b in enumerate(btns)
                  if b.id == 'btn-save-go-metadata')
        res2s = form2.submit('save', index=i2, status='*')
        assert res2s.status in [301, 302]

        # 3rd stage - core metadata

        res3 = res2s.follow()

        form3 = res3.forms['package-form']
        for k in [
                'version', 'url', 'author', 'author_email', 'maintainer',
                'maintainer_email'
        ]:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        # 3rd stage - dataset_type-related metadata

        for t, v in chain(
                flatten(pkg_dict[dt]).items(),
                flatten(wrong_pkg_dict[dt]).items()):
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        btns = form3.fields.get('save')
        i3 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-finish')
        res3s = form3.submit('save', index=i3, status='*')
        # Note a validation error leads to an HTTP 200 and the form is re-rendered
        assert res3s.status in [200]

        pq3s = pyquery.PyQuery(res3s.body.decode('utf-8'))
        pq3s_errors = pq3s.find('.errors.error-messages')
        assert pq3s_errors
        error_keys = [
            el.attrib['data-key'] for el in pq3s_errors.find("li .title")
        ]
        for t in flatten(wrong_pkg_dict[dt]):
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            assert k in error_keys

        # Fill the correct values and re-submit

        form4 = res3s.forms['package-form']

        for k in [
                'version', 'url', 'author', 'author_email', 'maintainer',
                'maintainer_email'
        ]:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form4.set(k, v)

        for t, v in flatten(wrong_pkg_dict[dt]).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            form4.set(k, '')

        for t, v in flatten(pkg_dict[dt]).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form4.set(k, v)

        res3s = form4.submit('save', index=i3, status='*')
        assert res3s.status in [301, 302]

        # Finished, return to "view" page

        res4 = res3s.follow()
        assert res4.status in [200]
        assert res4.request.url == '/dataset/%s' % (pkg_name)

        # Compare to package_show result

        res_dict = self._get_package(pkg_name)

        assert res_dict['dataset_type'] == dt
        for k in (self.basic_fields & set(res_dict.keys())):
            assert not (k in pkg_dict) or (res_dict[k] == pkg_dict[k])

        pkg_dt_dict = flatten(pkg_dict[dt])
        res_dt_dict = flatten(res_dict[dt])
        for k in pkg_dt_dict.keys():
            assert res_dt_dict[k] == pkg_dt_dict[k]

        pass
    def _create_package_with_errors(self, fixture_name, changeset):
        
        pkg_dict = package_fixtures[fixture_name]['0'] 
        wrong_pkg_dict = package_fixtures[fixture_name][changeset] 
        pkg_name = pkg_dict['name']

        res1 = self.app.get('/dataset/new', status='*')
        assert res1.status == 200
        
        key_prefix = dt = pkg_dict['dataset_type']

        # 1st stage
    
        form1 = res1.forms['package-form'] 

        for k in ['title', 'name', 'dataset_type', 'notes', 'license_id']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        form1.set('tag_string', ','.join(map(lambda t: t['name'], pkg_dict.get('tags', []))))
        
        res1s = form1.submit('save')
        assert res1s.status in [301, 302] 
        
        # 2nd stage

        res2 = res1s.follow()
        
        resource_dict = next(iter(pkg_dict['resources'])) # 1st resource
        form2 = res2.forms['resource-form']
        for k in ['description', 'url', 'name', 'format']:
            v = resource_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form2.set(k, v)
        
        btns = form2.fields.get('save')
        i2 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-go-metadata')
        res2s = form2.submit('save', index=i2, status='*')
        assert res2s.status in [301, 302]

        # 3rd stage - core metadata

        res3 = res2s.follow()
        
        form3 = res3.forms['package-form']
        for k in ['version', 'author', 'author_email', 'maintainer', 'maintainer_email']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        # 3rd stage - dataset_type-related metadata

        for t, v in chain(
                flatten(pkg_dict[dt]).items(), flatten(wrong_pkg_dict[dt]).items()):
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        btns = form3.fields.get('save')
        i3 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-finish')
        res3s = form3.submit('save', index=i3, status='*')
        # Note a validation error leads to an HTTP 200 and the form is re-rendered
        assert res3s.status in [200]
       
        pq3s = pyquery.PyQuery(res3s.body.decode('utf-8'))
        pq3s_errors = pq3s.find('.errors.error-messages')
        assert pq3s_errors
        error_keys = [el.attrib['data-key'] for el in pq3s_errors.find("li .title")]
        for t in flatten(wrong_pkg_dict[dt]):
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            assert k in error_keys

        # Fill the correct values and re-submit
        
        form4 = res3s.forms['package-form']
        
        for k in ['version', 'author', 'author_email', 'maintainer', 'maintainer_email']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form4.set(k, v)

        for t, v in flatten(wrong_pkg_dict[dt]).items():
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            form4.set(k, '')

        for t, v in flatten(pkg_dict[dt]).items():
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form4.set(k, v)
         
        res3s = form4.submit('save', index=i3, status='*')
        assert res3s.status in [301, 302]

        # Finished, return to "view" page
        
        res4 = res3s.follow()
        assert res4.status in [200]
        assert res4.request.url == '/dataset/%s' %(pkg_name)

        # Compare to package_show result

        res_dict = self._get_package(pkg_name)

        assert res_dict['dataset_type'] == dt
        for k in (self.basic_fields & set(res_dict.keys())):
            assert not (k in pkg_dict) or (res_dict[k] == pkg_dict[k])

        pkg_dt_dict = flatten(pkg_dict[dt])
        res_dt_dict = flatten(res_dict[dt])
        for k in pkg_dt_dict.keys():
            assert res_dt_dict[k] == pkg_dt_dict[k]

        pass
        u"created": u"12-Dec-2014",
        u"samples": {
            "a": [1.4, 7.6, 9.7, 5.9, 5.0, 9.1, 11.3],
            "b": [4.9],
            "c": {
                # Here, unflatten() should detect a list
                u"0": 99,
                u"1": 100,
                u"2": 199,
            },
        },
    },
    u"author": u"lalakis",
}

d1 = dictization.flatten(d)

d2 = dictization.unflatten(d1)


def test_flattened_1():
    for k in sorted(d1):
        v = d1.get(k)
        assert isinstance(v, basestring) or isinstance(v, float) or isinstance(v, int), "%r is not scalar" % (v)


@raises(AssertionError)
def test_inversed_1():
    s0 = json.dumps(d)
    s2 = json.dumps(d2)
    # Should fail because d[u'measurements'][u'samples']['c'] is converted to a list
    eq_(s0, s2)
Example #17
def _test_dictize_update_discard_junk(fixture_name, dtype, changeset):

    key_prefix = 'fooo'

    x0 = getattr(fixtures, fixture_name)
    assert isinstance(x0, types.FooMetadata)
    d = updates[dtype][changeset]

    # Prepend a key-prefix, and then insert some junk items into df

    df = flatten(d, lambda k: '.'.join(map(str, k)))

    af = []
    af.extend([('%s.%s' % (key_prefix, k), v) for k, v in df.items()])
    af.extend([('a', 99), ('junk.1', 'something'), ('z.aa', 100),
               (key_prefix, 'baobab')])
    df = dict(af)

    # Load

    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={})

    x2 = copy.deepcopy(x0)
    x2.from_dict(df,
                 is_flat=1,
                 opts={
                     'unserialize-keys': 1,
                     'key-prefix': key_prefix
                 })

    assert x1 == x2

    # Update in shallow mode

    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={'update': 1})

    x2 = copy.deepcopy(x0)
    x2.from_dict(df,
                 is_flat=1,
                 opts={
                     'update': 1,
                     'unserialize-keys': 1,
                     'key-prefix': key_prefix
                 })

    assert x1 == x2

    # Update in deep mode

    x1 = copy.deepcopy(x0)
    x1.from_dict(d, is_flat=0, opts={'update': 'deep'})

    x2 = copy.deepcopy(x0)
    x2.from_dict(df,
                 is_flat=1,
                 opts={
                     'update': 'deep',
                     'unserialize-keys': 1,
                     'key-prefix': key_prefix
                 })

    assert x1 == x2
    def _create_package(self, fixture_name):
        
        pkg_dict = package_fixtures[fixture_name]['0'] 
        pkg_name = pkg_dict['name']

        res1 = self.app.get('/dataset/new', status='*')
        assert res1.status == 200
        
        key_prefix = dt = pkg_dict['dataset_type']

        # 1st stage
    
        form1 = res1.forms['package-form'] 

        for k in ['title', 'name', 'dataset_type', 'notes', 'license_id']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        form1.set('tag_string', ','.join(map(lambda t: t['name'], pkg_dict.get('tags', []))))
        
        res1s = form1.submit('save')
        assert res1s.status in [301, 302] 
        
        # 2nd stage

        res2 = res1s.follow()
        
        resource_dict = next(iter(pkg_dict['resources'])) # 1st resource
        form2 = res2.forms['resource-form']
        for k in ['description', 'url', 'name', 'format']:
            v = resource_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form2.set(k, v)
        
        btns = form2.fields.get('save')
        i2 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-go-metadata')
        res2s = form2.submit('save', index=i2, status='*')
        assert res2s.status in [301, 302]

        # 3rd stage - core metadata

        res3 = res2s.follow()
        
        form3 = res3.forms['package-form']
        for k in ['version', 'author', 'author_email', 'maintainer', 'maintainer_email']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        # 3rd stage - dataset_type-related metadata

        for t, v in flatten(pkg_dict.get(dt)).items():
            k = '.'.join((key_prefix,) + tuple(map(str,t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        btns = form3.fields.get('save')
        i3 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-finish')
        res3s = form3.submit('save', index=i3, status='*')
        assert res3s.status in [301, 302]

        # Finished, return to "view" page
        
        res4 = res3s.follow()
        assert res4.status in [200]
        assert res4.request.url == '/dataset/%s' %(pkg_name)

        # Compare to package_show result

        res_dict = self._get_package(pkg_name)

        assert res_dict['dataset_type'] == dt

        for k in (self.basic_fields & set(res_dict.keys())):
            assert (not k in pkg_dict) or res_dict[k] == pkg_dict[k]
        pkg_dt_dict = flatten(pkg_dict.get(dt))
        res_dt_dict = flatten(res_dict.get(dt))
        for k in pkg_dt_dict.keys():
            assert res_dt_dict[k] == pkg_dt_dict[k]

        return
    def _create_package(self, fixture_name):

        pkg_dict = package_fixtures[fixture_name]['0']
        pkg_name = pkg_dict['name']

        res1 = self.app.get('/dataset/new', status='*')
        assert res1.status == 200

        dt = pkg_dict['dataset_type']
        dt_spec = dataset_types[dt]
        key_prefix = dt_spec.get('key_prefix', dt)

        # 1st stage

        form1 = res1.forms['package-form']

        for k in ['title', 'name', 'dataset_type', 'notes', 'license_id']:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form1.set(k, v)
        form1.set('tag_string',
                  ','.join(map(lambda t: t['name'], pkg_dict.get('tags', []))))

        res1s = form1.submit('save')
        assert res1s.status in [301, 302]

        # 2nd stage

        res2 = res1s.follow()

        resource_dict = next(iter(pkg_dict['resources']))  # 1st resource
        form2 = res2.forms['resource-form']
        for k in ['description', 'url', 'name', 'format']:
            v = resource_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form2.set(k, v)

        btns = form2.fields.get('save')
        i2 = next(j for j, b in enumerate(btns)
                  if b.id == 'btn-save-go-metadata')
        res2s = form2.submit('save', index=i2, status='*')
        assert res2s.status in [301, 302]

        # 3rd stage - core metadata

        res3 = res2s.follow()

        form3 = res3.forms['package-form']
        for k in [
                'version', 'url', 'author', 'author_email', 'maintainer',
                'maintainer_email'
        ]:
            v = pkg_dict.get(k)
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        # 3rd stage - dataset_type-related metadata

        for t, v in flatten(pkg_dict.get(dt)).items():
            k = '.'.join((key_prefix, ) + tuple(map(str, t)))
            v = v.encode('utf-8') if isinstance(v, unicode) else v
            form3.set(k, v)

        btns = form3.fields.get('save')
        i3 = next(j for j, b in enumerate(btns) if b.id == 'btn-save-finish')
        res3s = form3.submit('save', index=i3, status='*')
        assert res3s.status in [301, 302]

        # Finished, return to "view" page

        res4 = res3s.follow()
        assert res4.status in [200]
        assert res4.request.url == '/dataset/%s' % (pkg_name)

        # Compare to package_show result

        res_dict = self._get_package(pkg_name)

        assert res_dict['dataset_type'] == dt

        for k in (self.basic_fields & set(res_dict.keys())):
            assert res_dict[k] == pkg_dict[k]
        pkg_dt_dict = flatten(pkg_dict.get(dt))
        res_dt_dict = flatten(res_dict.get(dt))
        for k in pkg_dt_dict.keys():
            assert res_dt_dict[k] == pkg_dt_dict[k]

        return
Example #20
    def flatten_errors(self, errors):
        ''' Convert an <errors> structure to a flattened dict '''
        error_dict = self._dictize_errors(errors)
        return dictization.flatten(error_dict)
    def _check_if_changed(cls, d1, d2, expected_keys={}):
        d1f = dictization.flatten(d1)
        d2f = dictization.flatten(d2)
        for k in (set(d1f.keys()) - set(expected_keys)):
            assert d1f[k] == d2f[k], 'Expected not to be changed!'
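
    # Typical use (a sketch; the surrounding names are illustrative): fetch a
    # package dict, apply an update, fetch it again, and assert that only the
    # flattened keys listed in expected_keys may differ between the snapshots.
    #
    #   pkg_before = self._get_package(name)
    #   ... perform the update under test ...
    #   pkg_after = self._get_package(name)
    #   self._check_if_changed(pkg_before, pkg_after, expected_keys=changed_keys)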