Example #1
0
def create_dataset_from_zip(context, data_dict):
    """Create a CKAN dataset from an uploaded MapAction map-package zip.

    The zip is expected to contain exactly one ``.xml`` metadata file plus
    any number of map resource files.  The XML drives the dataset's title,
    name, notes and extras; every non-XML member is attached as a resource.

    :param context: CKAN action context, passed through to
        ``package_create`` and resource creation.
    :param data_dict: must contain ``upload`` (a file-upload object with a
        ``.file`` attribute); may contain ``owner_org`` and ``private``
        (defaults to True; forced to False when no ``owner_org`` is given,
        since CKAN cannot create a private dataset without an organization).
    :returns: the created dataset dict as returned by ``package_create``.
    :raises toolkit.ValidationError: if the zip does not contain exactly
        one XML metadata file.
    """
    upload = data_dict.get('upload')
    private = data_dict.get('private', True)

    map_package = upload.file

    # NOTE(review): tempdir is never removed — extracted files must survive
    # until _create_and_upload_local_resource has read them, but nothing
    # cleans up afterwards either. Consider shutil.rmtree once uploads are
    # confirmed complete.
    tempdir = tempfile.mkdtemp('-mapactionzip')

    metadata_paths = []
    file_paths = []
    with zipfile.ZipFile(map_package, 'r') as z:
        # NOTE(review): extractall on untrusted zips is vulnerable to path
        # traversal ("zip slip") on Python < 3.6ish behavior; confirm
        # uploads are trusted or sanitize member names.
        z.extractall(tempdir)
        for f in z.namelist():
            full_path = os.path.join(tempdir, f)
            if f.endswith('.xml'):
                metadata_paths.append(full_path)
            else:
                file_paths.append(full_path)

    # Explicit validation instead of `assert`: asserts are stripped under
    # `python -O`, and an AssertionError gives API callers no usable error.
    if len(metadata_paths) != 1:
        raise toolkit.ValidationError(
            {'upload': ['Zip file must contain exactly one XML metadata file']})
    metadata_file = metadata_paths[0]

    et = parse(metadata_file)

    dataset_dict = {}

    owner_org = data_dict.get('owner_org')
    if owner_org:
        dataset_dict['owner_org'] = owner_org
    else:
        # A dataset cannot be private without an owning organization.
        private = False

    # Assumes the metadata XML always carries these elements; parse() is
    # presumably an ElementTree-style parser — .find() returning None would
    # raise AttributeError here. TODO confirm schema guarantees.
    dataset_dict['title'] = join_lines(et.find('.//mapdata/title').text)
    map_id = et.find('.//mapdata/ref').text
    operation_id = et.find('.//mapdata/operationID').text
    dataset_dict['name'] = slugify('%s %s' % (operation_id, map_id))
    dataset_dict['notes'] = join_lines(et.find('.//mapdata/summary').text)
    dataset_dict['private'] = private
    dataset_dict['extras'] = [{
        'key': k,
        'value': v
    } for (k, v) in metadataimporter.map_metadata_to_ckan_extras(et).items()]
    dataset = toolkit.get_action('package_create')(context, dataset_dict)

    # Attach every non-XML zip member as a resource on the new dataset.
    for resource_file in file_paths:
        resource = {
            'package_id': dataset['id'],
            'path': resource_file,
        }
        _create_and_upload_local_resource(context, resource)

    return dataset
def create_dataset_from_zip(context, data_dict):
    """Build a CKAN dataset out of an uploaded MapAction zip package.

    Extracts the archive to a scratch directory, reads the single XML
    metadata member to populate the dataset fields, creates the dataset
    via ``package_create``, then uploads every remaining member as a
    resource.  Returns the created dataset dict.
    """
    upload = data_dict.get('upload')
    private = data_dict.get('private', True)

    scratch_dir = tempfile.mkdtemp('-mapactionzip')

    metadata_paths = []
    file_paths = []
    with zipfile.ZipFile(upload.file, 'r') as archive:
        archive.extractall(scratch_dir)
        for member in archive.namelist():
            extracted = os.path.join(scratch_dir, member)
            # XML members carry metadata; everything else becomes a resource.
            target = metadata_paths if member.endswith('.xml') else file_paths
            target.append(extracted)

    assert len(metadata_paths) == 1
    et = parse(metadata_paths[0])

    dataset_dict = {}

    owner_org = data_dict.get('owner_org')
    if owner_org:
        dataset_dict['owner_org'] = owner_org
    else:
        # No organization means the dataset cannot be private.
        private = False

    ref = et.find('.//mapdata/ref').text
    op_id = et.find('.//mapdata/operationID').text
    dataset_dict['title'] = join_lines(et.find('.//mapdata/title').text)
    dataset_dict['name'] = slugify('%s %s' % (op_id, ref))
    dataset_dict['notes'] = join_lines(et.find('.//mapdata/summary').text)
    dataset_dict['private'] = private
    extras = metadataimporter.map_metadata_to_ckan_extras(et)
    dataset_dict['extras'] = [
        {'key': key, 'value': value} for key, value in extras.items()
    ]

    dataset = toolkit.get_action('package_create')(context, dataset_dict)

    for path in file_paths:
        _create_and_upload_local_resource(context, {
            'package_id': dataset['id'],
            'path': path,
        })

    return dataset
 def setUp(self):
     """Parse the test fixture XML and cache its CKAN extras mapping.

     Sanity-checks that the importer produced at least one extra so the
     individual tests can assume a non-empty dict.
     """
     tree = parse(custom_helpers.get_test_xml())
     self.extras_dict = metadataimporter.map_metadata_to_ckan_extras(tree)
     self.assertTrue(len(self.extras_dict) > 0)
Example #4
0
 def setUp(self):
     """Build the extras dict from the fixture XML before each test.

     Asserts it is non-empty so later assertions never run against an
     empty mapping.
     """
     element_tree = parse(custom_helpers.get_test_xml())
     extras = metadataimporter.map_metadata_to_ckan_extras(element_tree)
     self.extras_dict = extras
     self.assertTrue(len(self.extras_dict) > 0)