Example #1
# Nested helper: load the file into the DataStore via messytables, record
# the row count, then mark the DataStore data as active.
def messytables_load():
    try:
        loader.load_table(tmp_file.name,
                          resource_id=resource['id'],
                          mimetype=resource.get('format'),
                          logger=logger)
    except JobError as e:
        logger.error('Error during messytables load: {}'.format(e))
        raise
    loader.calculate_record_count(
        resource_id=resource['id'], logger=logger)
    set_datastore_active(data, resource, api_key, ckan_url, logger)
    logger.info('Finished loading with messytables')
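In the surrounding xloader job (see Example #4), a helper like this typically acts as the fallback when the faster COPY-based load raises JobError. Below is a minimal, self-contained sketch of that fallback wiring; the two load functions are stubs for illustration, not the real loader API.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class JobError(Exception):
    """Stand-in for the JobError the loader raises when a load attempt fails."""

def direct_load():
    # Stub for the COPY-based load of Example #3; pretend it fails here.
    raise JobError('COPY rejected the file')

def messytables_load():
    # Stub for the messytables-based load shown above.
    logger.info('Finished loading with messytables')

try:
    direct_load()
except JobError as e:
    # Fall back to the slower but more forgiving messytables path.
    logger.warning('Load using COPY failed: {}'.format(e))
    logger.info('Trying again with messytables')
    messytables_load()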
Example #2
# Nested helper: as in Example #1, but it also passes the resource name as a
# table alias and writes the file hash back to the resource afterwards
# (patch only).
def messytables_load():
    try:
        loader.load_table(tmp_file.name,
                          resource_id=resource['id'],
                          resource_alias=resource['name'],
                          mimetype=resource.get('format'),
                          logger=logger)
    except JobError as e:
        logger.error('Error during messytables load: {}'.format(e))
        raise
    loader.calculate_record_count(
        resource_id=resource['id'], logger=logger)
    set_datastore_active(data, resource, logger)
    logger.info('Finished loading with messytables')
    update_resource(resource={'id': resource['id'], 'hash': resource['hash']},
                    patch_only=True)
    logger.info('File Hash updated for resource: {}'.format(resource['hash']))
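Example #2 writes the computed file hash back to the resource so that a later run can skip an unchanged file (the check shown in Example #4). The examples do not show how the hash is computed; a plausible sketch, assuming a plain MD5 over the downloaded temp file (the helper name and chunk size are invented for illustration):

import hashlib

def compute_file_hash(path, chunk_size=16 * 1024):
    # Hypothetical helper: hash the downloaded file in chunks so that large
    # files never have to be read into memory in one go.
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()

# e.g. resource['hash'] = compute_file_hash(tmp_file.name)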
Example #3
# Nested helper: load the CSV directly (COPY), mark the data as active and
# viewable, notify CKAN via the xloader hook, then index the columns.
def direct_load():
    fields = loader.load_csv(
        tmp_file.name,
        resource_id=resource['id'],
        mimetype=resource.get('format'),
        logger=logger)
    loader.calculate_record_count(
        resource_id=resource['id'], logger=logger)
    set_datastore_active(data, resource, api_key, ckan_url, logger)
    job_dict['status'] = 'running_but_viewable'
    callback_xloader_hook(result_url=input['result_url'],
                          api_key=api_key,
                          job_dict=job_dict)
    logger.info('Data now available to users: {}'.format(resource_ckan_url))
    loader.create_column_indexes(
        fields=fields,
        resource_id=resource['id'],
        logger=logger)
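Example #3 reports progress back to CKAN through callback_xloader_hook as soon as the data is viewable. The hook's body is not part of these examples; a rough sketch of what such a callback could do, assuming it simply POSTs the job dict to the result URL with the API key in the Authorization header (an assumption for illustration, not the actual ckanext-xloader implementation):

import requests

def callback_xloader_hook(result_url, api_key, job_dict):
    # Sketch only: push the updated job status back to CKAN's callback URL.
    headers = {'Content-Type': 'application/json'}
    if api_key:
        headers['Authorization'] = api_key
    try:
        response = requests.post(result_url, json=job_dict, headers=headers)
        response.raise_for_status()
    except requests.RequestException:
        return False
    return True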
Example #4
    # hash isn't actually stored, so this is a bit worthless at the moment
    if (resource.get('hash') == file_hash and not data.get('ignore_hash')):
        logger.info('Ignoring resource - the file hash hasn\'t changed: '
                    '{hash}.'.format(hash=file_hash))
        return
    logger.info('File hash: {}'.format(file_hash))
    resource['hash'] = file_hash  # TODO write this back to the actual resource

    # Load it
    logger.info('Loading CSV')
    try:
        fields = loader.load_csv(tmp_file.name,
                                 resource_id=resource['id'],
                                 mimetype=resource.get('format'),
                                 logger=logger)
        loader.calculate_record_count(resource_id=resource['id'],
                                      logger=logger)
        set_datastore_active(data, resource, api_key, ckan_url, logger)
        job_dict['status'] = 'running_but_viewable'
        callback_xloader_hook(result_url=input['result_url'],
                              api_key=input['api_key'],
                              job_dict=job_dict)
        logger.info(
            'Data now available to users: {}'.format(resource_ckan_url))
        loader.create_column_indexes(fields=fields,
                                     resource_id=resource['id'],
                                     logger=logger)
    except JobError as e:
        logger.warning('Load using COPY failed: {}'.format(e))
        logger.info('Trying again with messytables')
        try:
            loader.load_table(tmp_file.name,