def after_update(self, context, pkg_dict):
    """Update the datapackage.json using the metastore-lib backend.

    After a package is updated, calls metastore-lib to update the
    datapackage.json file in the GitHub repository. Non-dataset
    packages are returned unchanged.
    """
    if pkg_dict['type'] != 'dataset':
        return pkg_dict

    # Re-fetch the complete dict so resource data is included as well;
    # tracking_summary (via include_tracking) is required by templates.
    pkg_dict = toolkit.get_action('package_show')(
        {}, {
            'id': pkg_dict['id'],
            'include_tracking': True
        })

    package_spec = dataset_to_frictionless(pkg_dict)
    backend = get_metastore_backend()
    author = create_author_from_context(context)
    pkg_info = backend.update(pkg_dict['name'], package_spec, author=author)
    log.info(
        'Package {} updated correctly. Revision {} created.'.format(
            pkg_info.package_id, pkg_info.revision))
    return pkg_dict
def test_resource_path_multiple_resources():
    """Each resource gets a 'path' from its url, existing path, or
    name + format, independently of its siblings."""
    resources = [
        {"url": "data/foo.csv", "name": "resource 1", "id": "resource-1"},
        {"url": "https://example.com/data.csv", "name": "resource 2",
         "id": "resource-2"},
        {"path": "an/existing/path.csv", "name": "resource 3",
         "id": "resource-3"},
        {"name": "my-resource", "format": "xls", "sha256": SHA256,
         "id": "resource-4"},
    ]
    dataset = {"name": "my package", "resources": resources}

    converted = datapackage.dataset_to_frictionless(dataset)['resources']

    expected_paths = [
        'data/foo.csv',
        'https://example.com/data.csv',
        'an/existing/path.csv',
        'my-resource.xls',
    ]
    for converted_resource, expected in zip(converted, expected_paths):
        assert_equals(converted_resource['path'], expected)
def test_resource_path_conflicting_paths_fixed():
    """Resources whose computed paths collide are de-duplicated by
    appending a numeric suffix before the extension."""
    resources = [
        {"url": "data/foo.csv", "name": "resource 1", "id": "r-1"},
        {"url": "data/bar.csv", "name": "resource 2", "id": "r-2"},
        {"path": "data/foo.csv", "name": "resource 3", "id": "r-3"},
        {"name": "data/foo", "format": "csv", "sha256": SHA256, "id": "r-4"},
        {"path": "../data/foo.csv", "name": "resource 5", "id": "r-5"},
    ]
    dataset = {"name": "my package", "resources": resources}

    converted = datapackage.dataset_to_frictionless(dataset)['resources']

    expected_paths = [
        'data/foo.csv',
        'data/bar.csv',
        'data/foo-2.csv',
        'data/foo-3.csv',
        'data/foo-4.csv',
    ]
    for converted_resource, expected in zip(converted, expected_paths):
        assert_equals(converted_resource['path'], expected)
def after_create(self, context, pkg_dict):
    """Create a datapackage.json using the metastore-lib backend.

    After creating the package, calls metastore-lib to create a new
    GitHub repository and store the package dict in a
    datapackage.json file. Non-dataset packages are passed through
    untouched.
    """
    if pkg_dict['type'] == 'dataset':
        datapackage = dataset_to_frictionless(pkg_dict)
        backend = get_metastore_backend()
        author = create_author_from_context(context)
        pkg_info = backend.create(pkg_dict['name'], datapackage,
                                  author=author)
        # Lazy %-args: the message is only interpolated if INFO is enabled.
        log.info('Package %s created correctly. Revision %s created.',
                 pkg_info.package_id, pkg_info.revision)
    return pkg_dict
def test_resource_path_relative_dirs_normalization(input, expected):
    """Relative directory segments in a resource url are normalized in
    the generated 'path'.

    NOTE: the parameter names (including ``input``, which shadows the
    builtin) are referenced by the out-of-view parametrization and must
    not be renamed.
    """
    dataset = {
        "name": "my package",
        "resources": [{"url": input, "id": "r-1"}],
    }
    converted = datapackage.dataset_to_frictionless(dataset)
    first_resource = converted['resources'][0]
    assert_equals(first_resource.get('path'), expected)
def test_resource_path_is_added(resource, expected_path):
    """A 'path' key is added to the converted resource, derived from the
    given input resource dict."""
    dataset = {
        "name": "my package",
        "resources": [resource],
    }
    converted = datapackage.dataset_to_frictionless(dataset)
    first_resource = converted['resources'][0]
    assert_equals(first_resource.get('path'), expected_path)