Example 1
    def test_read_metadata(self):
        """read_metadata returns a success report for a single scraped record."""
        importer = MetadataImporter()
        source = SourceFactory(
            scraper_name='fake',
            scraperwiki_url='http://www.google.com/',
        )

        def fake_scraper_metadata(*args):
            # Simulate one record coming back from the remote scraper.
            record = {
                'description': 'dati emergenza estratti da www.intoscana.it',
                'license': 'others',
                'tags': 'emergenza, carabinieri',
                'url': 'http://www.example.com/index.html',
                'curator': 'fondazione sistema toscana',
                'bounding_box': '42.24, 9.69, 44.47, 12.37',
                'other': None,
                'download': 'table://emergencies',
                'name': 'numeri di emergenza in toscana',
            }
            return [record]

        # Patch out the DB write and the network fetch so the test is
        # hermetic: Dataset.save becomes a no-op and the scraper call is
        # answered by the fake above.
        with patch.object(Dataset, 'save'), \
                patch.object(MetadataImporter, 'get_metadata_of_scraper',
                             side_effect=fake_scraper_metadata):
            expected = {'total': 1, 'errors': 0, 'report': []}
            self.assertEqual(expected, importer.read_metadata(source))
Example 2
def source_fetch_metadata(request, pk):
    """
    Update metadata action.

    Re-imports metadata for the Source identified by *pk* from its
    configured scraper, replacing any previously imported datasets.
    Flashes a success/error message and redirects to the source page.
    """
    source = Source.objects.get(pk=pk)

    try:
        # Validate BEFORE the destructive delete below: if no scraper is
        # configured the import cannot possibly succeed, and the existing
        # datasets must not be wiped for nothing.
        if not source.scraper_name:
            raise Exception('A scraper name must be specified.')

        # Replace the previously imported datasets with a fresh import.
        source.datasets.all().delete()
        report = MetadataImporter.read_metadata(source)
    except URLError:
        logger.exception('Timeout while accessing scraper data')
        messages.error(request, "Timeout while accessing scraper data")
    except Exception:
        logger.exception('Error while updating metadata')
        messages.error(request, "Error while updating metadata")
    else:
        messages.info(
            request,
            "{} metadata imported, {} errors".format(
                report['total'], report['errors']
            )
        )

    return redirect(source)
Example 3 (duplicate of Example 2 with alternative call formatting)
def source_fetch_metadata(request, pk):
    """
    Update metadata action.

    Re-imports metadata for the Source identified by *pk* from its
    configured scraper, replacing any previously imported datasets.
    Flashes a success/error message and redirects to the source page.
    """
    source = Source.objects.get(pk=pk)

    try:
        # Validate BEFORE deleting anything: a source without a scraper
        # name can never import, so its current datasets must survive.
        if not source.scraper_name:
            raise Exception('A scraper name must be specified.')

        # Only now discard the old datasets and run the import.
        source.datasets.all().delete()
        report = MetadataImporter.read_metadata(source)
    except URLError:
        logger.exception('Timeout while accessing scraper data')
        messages.error(request, "Timeout while accessing scraper data")
    except Exception:
        logger.exception('Error while updating metadata')
        messages.error(request, "Error while updating metadata")
    else:
        messages.info(
            request,
            "{} metadata imported, {} errors".format(report['total'],
                                                     report['errors']))

    return redirect(source)