Example #1
def explore_iati(request, pk):
    context, db_data, error = explore_data_context(request, pk, get_file_type)
    if error:
        return error
    # The cached context lives inside the media directory for this data, so it
    # is removed along with that directory when it expires
    cached_context_path = os.path.join(db_data.upload_dir(),
                                       "cached_context.json")
    if cached_context_path == db_data.original_file.file.name:
        raise PermissionError(
            'You are not allowed to upload a file with this name.')
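    # A POST request bypasses the cached context so it is rebuilt
    # (and re-cached) below.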
    if request.POST:
        cached = False
    else:
        try:
            with open(cached_context_path) as fp:
                context = json.load(fp)
            cached = True
        except (json.decoder.JSONDecodeError, FileNotFoundError):
            cached = False
    if not cached:
        context, db_data, error = explore_data_context_iati(
            request, context, db_data, error)
        if error:
            return error
        else:
            with open(cached_context_path, 'w') as fp:
                json.dump(context, fp)
    return render(request, 'cove_iati/explore.html', context)
Example #2
def explore_360(request, pk, template='cove_360/explore.html'):
    schema_360 = Schema360()
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    lib_cove_config = LibCoveConfig()
    lib_cove_config.config.update(settings.COVE_CONFIG)

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _(format_html('We think you tried to upload a JSON file, but it is not well formed JSON.'
                             '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                             '</span> <strong>Error message:</strong> {}', err)),
                    'error': format(err)
                })
            if not isinstance(json_data, dict):
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _('360Giving JSON should have an object as the top level, the JSON you supplied does not.'),
                })

            context.update(convert_json(upload_dir, upload_url, file_name, schema_url=schema_360.release_schema_url,
                                        request=request, flatten=request.POST.get('flatten'),
                                        lib_cove_config=lib_cove_config))

    else:
        context.update(convert_spreadsheet(upload_dir, upload_url, file_name, file_type, lib_cove_config, schema_360.release_schema_url,
                                           schema_360.release_pkg_schema_url))
        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

    context = common_checks_360(context, upload_dir, json_data, schema_360)

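    # Only expose the grants list when the parsed data is a dict with an
    # iterable 'grants' entry.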
    if hasattr(json_data, 'get') and hasattr(json_data.get('grants'), '__iter__'):
        context['grants'] = json_data['grants']
    else:
        context['grants'] = []

    context['first_render'] = not db_data.rendered
    if not db_data.rendered:
        db_data.rendered = True
    db_data.save()

    return render(request, template, context)
Example #3
def explore_360(request, pk, template='cove_360/explore.html'):
    schema_360 = Schema360()
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _(format_html('We think you tried to upload a JSON file, but it is not well formed JSON.'
                             '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                             '</span> <strong>Error message:</strong> {}', err)),
                    'error': format(err)
                })
            if not isinstance(json_data, dict):
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _('360Giving JSON should have an object as the top level, the JSON you supplied does not.'),
                })

            context.update(convert_json(upload_dir, upload_url, file_name, schema_url=schema_360.release_schema_url,
                                        request=request, flatten=request.POST.get('flatten')))

    else:
        context.update(convert_spreadsheet(upload_dir, upload_url, file_name, file_type, schema_360.release_schema_url, schema_360.release_pkg_schema_url))
        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

    context = common_checks_360(context, upload_dir, json_data, schema_360)

    if hasattr(json_data, 'get') and hasattr(json_data.get('grants'), '__iter__'):
        context['grants'] = json_data['grants']
    else:
        context['grants'] = []

    context['first_render'] = not db_data.rendered
    if not db_data.rendered:
        db_data.rendered = True
    db_data.save()

    return render(request, template, context)
Example #4
def explore_iati(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    file_type = context['file_type']
    if file_type != 'xml':
        context.update(convert_spreadsheet(request, db_data, file_type, xml=True))
        data_file = context['converted_path']
        # sort converted xml
        sort_iati_xml_file(context['converted_path'], context['converted_path'])
    else:
        data_file = db_data.original_file.file.name

    context.update(common_checks_context_iati(db_data, data_file, file_type))
    return render(request, 'cove_iati/explore.html', context)
Example #5
def explore_360(request, pk, template='cove_360/explore.html'):
    schema_360 = Schema360()
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error
    file_type = context['file_type']

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(db_data.original_file.file.name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp)
            except ValueError as err:
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        _('We think you tried to upload a JSON file, but it is not well formed JSON.'
                          '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                          '</span> <strong>Error message:</strong> {}'.format(
                              err)),
                        'error':
                        format(err)
                    })
            context.update(
                convert_json(request, db_data, schema_360.release_schema_url))
    else:
        context.update(
            convert_spreadsheet(request, db_data, file_type,
                                schema_360.release_schema_url))
        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp)

    context = common_checks_360(context, db_data, json_data, schema_360)
    return render(request, template, context)
Example #6
def explore_iati(request, pk):
    context, db_data, error = explore_data_context(request, pk, get_file_type)
    if error:
        return error

    file_type = context['file_type']
    if file_type != 'xml':
        schema_iati = SchemaIATI()
        context.update(
            convert_spreadsheet(db_data.upload_dir(),
                                db_data.upload_url(),
                                db_data.original_file.file.name,
                                file_type,
                                xml=True,
                                xml_schemas=[
                                    schema_iati.activity_schema,
                                    schema_iati.organisation_schema,
                                    schema_iati.common_schema,
                                ]))
        data_file = context['converted_path']
    else:
        data_file = db_data.original_file.file.name
        context.update(
            convert_json(db_data.upload_dir(),
                         db_data.upload_url(),
                         db_data.original_file.file.name,
                         request=request,
                         flatten=request.POST.get('flatten'),
                         xml=True))

    context = common_checks_context_iati(context, db_data.upload_dir(),
                                         data_file, file_type)
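    # Track whether this is the first time the results page has been rendered
    # for this data.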
    context['first_render'] = not db_data.rendered

    if not db_data.rendered:
        db_data.rendered = True

    return render(request, 'cove_iati/explore.html', context)
Example #7
def explore_iati(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    file_type = context['file_type']
    if file_type != 'xml':
        schema_iati = SchemaIATI()
        context.update(convert_spreadsheet(db_data.upload_dir(), db_data.upload_url(), db_data.original_file.file.name,
                       file_type, schema_iati.activity_schema, xml=True))
        data_file = context['converted_path']
        # sort converted xml
        sort_iati_xml_file(context['converted_path'], context['converted_path'])
    else:
        data_file = db_data.original_file.file.name

    context = common_checks_context_iati(context, db_data.upload_dir(), data_file, file_type)
    context['first_render'] = not db_data.rendered

    if not db_data.rendered:
        db_data.rendered = True

    return render(request, 'cove_iati/explore.html', context)
Example #8
def explore_iati(request, pk):
    context, db_data, error = explore_data_context(request, pk, get_file_type)
    if error:
        return error

    lib_cove_config = LibCoveConfig()
    lib_cove_config.config.update(settings.COVE_CONFIG)

    file_type = context['file_type']
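    # Spreadsheet uploads are converted to IATI XML using the activity,
    # organisation and common XML schemas; XML uploads can instead be
    # flattened to a spreadsheet on request.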
    if file_type != 'xml':
        schema_iati = SchemaIATI()
        context.update(convert_spreadsheet(
            db_data.upload_dir(), db_data.upload_url(), db_data.original_file.file.name,
            file_type, lib_cove_config, xml=True,
            xml_schemas=[
                schema_iati.activity_schema,
                schema_iati.organisation_schema,
                schema_iati.common_schema,
            ]))
        data_file = context['converted_path']
    else:
        data_file = db_data.original_file.file.name
        context.update(convert_json(db_data.upload_dir(), db_data.upload_url(), db_data.original_file.file.name,
                       request=request, flatten=request.POST.get('flatten'), xml=True, lib_cove_config=lib_cove_config))

    tree = get_tree(data_file)
    context = common_checks_context_iati(context, db_data.upload_dir(), data_file, file_type, tree)
    context['first_render'] = not db_data.rendered
    context['invalid_embedded_codelist_values'] = aggregate_results(context['invalid_embedded_codelist_values'])
    context['invalid_non_embedded_codelist_values'] = aggregate_results(context['invalid_non_embedded_codelist_values'])
    context['iati_identifiers_count'] = iati_identifier_count(tree)
    context['organisation_identifier_count'] = organisation_identifier_count(tree)

    if not db_data.rendered:
        db_data.rendered = True

    return render(request, 'cove_iati/explore.html', context)
Example #9
def explore_ocds(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    lib_cove_ocds_config = LibCoveOCDSConfig()
    lib_cove_ocds_config.config["current_language"] = translation.get_language(
    )
    lib_cove_ocds_config.config[
        "schema_version_choices"] = settings.COVE_CONFIG[
            "schema_version_choices"]
    lib_cove_ocds_config.config["schema_codelists"] = settings.COVE_CONFIG[
        "schema_codelists"]

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context["file_type"]

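    # A POSTed "version" lets the user pick a different schema version; if it
    # differs from the version stored for this data, cached conversions and
    # validation errors are regenerated ("replace" below).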
    post_version_choice = request.POST.get("version")
    replace = False
    validation_errors_path = os.path.join(upload_dir,
                                          "validation_errors-3.json")

    if file_type == "json":
        # open the data first so we can inspect for record package
        with open(file_name, encoding="utf-8") as fp:
            try:
                json_data = json.load(fp,
                                      parse_float=Decimal,
                                      object_pairs_hook=OrderedDict)
            except UnicodeError as err:
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry, we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        format_html(
                            _("The file that you uploaded doesn't appear to be well formed JSON. OCDS JSON follows the I-JSON format, which requires UTF-8 encoding. Ensure that your file uses UTF-8 encoding, then try uploading again."
                              '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                              '</span> <strong>Error message:</strong> {}'),
                            err),
                        'error':
                        format(err)
                    })
            except ValueError as err:
                raise CoveInputDataError(
                    context={
                        "sub_title":
                        _("Sorry, we can't process that data"),
                        "link":
                        "index",
                        "link_text":
                        _("Try Again"),
                        "msg":
                        format_html(
                            _(
                                "We think you tried to upload a JSON file, but it is not well formed JSON."
                                '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                                "</span> <strong>Error message:</strong> {}",
                            ),
                            err,
                        ),
                        "error":
                        format(err),
                    })

            if not isinstance(json_data, dict):
                raise CoveInputDataError(
                    context={
                        "sub_title":
                        _("Sorry, we can't process that data"),
                        "link":
                        "index",
                        "link_text":
                        _("Try Again"),
                        "msg":
                        _("OCDS JSON should have an object as the top level, the JSON you supplied does not."
                          ),
                    })

            version_in_data = json_data.get("version", "")
            db_data.data_schema_version = version_in_data
            select_version = post_version_choice or db_data.schema_version
            schema_ocds = SchemaOCDS(select_version=select_version,
                                     release_data=json_data,
                                     lib_cove_ocds_config=lib_cove_ocds_config,
                                     record_pkg="records" in json_data)

            if schema_ocds.missing_package:
                exceptions.raise_missing_package_error()
            if schema_ocds.invalid_version_argument:
                # This shouldn't happen unless the user sends random POST data.
                exceptions.raise_invalid_version_argument(post_version_choice)
            if schema_ocds.invalid_version_data:
                if isinstance(version_in_data, str) and re.compile(
                        r"^\d+\.\d+\.\d+$").match(version_in_data):
                    exceptions.raise_invalid_version_data_with_patch(
                        version_in_data)
                else:
                    if not isinstance(version_in_data, str):
                        version_in_data = "{} (it must be a string)".format(
                            str(version_in_data))
                    context["unrecognized_version_data"] = version_in_data

            if schema_ocds.version != db_data.schema_version:
                replace = True
            if schema_ocds.extensions:
                schema_ocds.create_extended_schema_file(upload_dir, upload_url)
            url = schema_ocds.extended_schema_file or schema_ocds.schema_url

            if "records" in json_data:
                context["conversion"] = None
            else:
                # Replace the spreadsheet conversion only if it exists already.
                converted_path = os.path.join(upload_dir, "flattened")
                replace_converted = replace and os.path.exists(converted_path +
                                                               ".xlsx")

                with warnings.catch_warnings():
                    warnings.filterwarnings(
                        'ignore'
                    )  # flattentool uses UserWarning, so can't set a specific category

                    convert_json_context = convert_json(
                        upload_dir,
                        upload_url,
                        file_name,
                        lib_cove_ocds_config,
                        schema_url=url,
                        replace=replace_converted,
                        request=request,
                        flatten=request.POST.get("flatten"),
                    )

                context.update(convert_json_context)

    else:
        # Use the lowest release pkg schema version accepting 'version' field
        metatab_schema_url = SchemaOCDS(
            select_version="1.1",
            lib_cove_ocds_config=lib_cove_ocds_config).pkg_schema_url
        metatab_data = get_spreadsheet_meta_data(upload_dir, file_name,
                                                 metatab_schema_url, file_type)
        if "version" not in metatab_data:
            metatab_data["version"] = "1.0"
        else:
            db_data.data_schema_version = metatab_data["version"]

        select_version = post_version_choice or db_data.schema_version
        schema_ocds = SchemaOCDS(
            select_version=select_version,
            release_data=metatab_data,
            lib_cove_ocds_config=lib_cove_ocds_config,
        )

        # Unlike the JSON data case above, do not check for a missing data package
        if schema_ocds.invalid_version_argument:
            # This shouldn't happen unless the user sends random POST data.
            exceptions.raise_invalid_version_argument(post_version_choice)
        if schema_ocds.invalid_version_data:
            version_in_data = metatab_data.get("version")
            if re.compile(r"^\d+\.\d+\.\d+$").match(version_in_data):
                exceptions.raise_invalid_version_data_with_patch(
                    version_in_data)
            else:
                context["unrecognized_version_data"] = version_in_data

        # Replace json conversion when user chooses a different schema version.
        if db_data.schema_version and schema_ocds.version != db_data.schema_version:
            replace = True

        if schema_ocds.extensions:
            schema_ocds.create_extended_schema_file(upload_dir, upload_url)
        url = schema_ocds.extended_schema_file or schema_ocds.schema_url
        pkg_url = schema_ocds.pkg_schema_url

        context.update(
            convert_spreadsheet(
                upload_dir,
                upload_url,
                file_name,
                file_type,
                lib_cove_ocds_config,
                schema_url=url,
                pkg_schema_url=pkg_url,
                replace=replace,
            ))

        with open(context["converted_path"], encoding="utf-8") as fp:
            json_data = json.load(fp,
                                  parse_float=Decimal,
                                  object_pairs_hook=OrderedDict)

    if replace:
        if os.path.exists(validation_errors_path):
            os.remove(validation_errors_path)

    context = common_checks_ocds(context, upload_dir, json_data, schema_ocds)

    if schema_ocds.json_deref_error:
        exceptions.raise_json_deref_error(schema_ocds.json_deref_error)

    context.update({
        "data_schema_version":
        db_data.data_schema_version,
        "first_render":
        not db_data.rendered,
        "validation_errors_grouped":
        group_validation_errors(context["validation_errors"]),
    })

    schema_version = getattr(schema_ocds, "version", None)
    if schema_version:
        db_data.schema_version = schema_version
    if not db_data.rendered:
        db_data.rendered = True

    db_data.save()

    if "records" in json_data:
        ocds_show_schema = SchemaOCDS(record_pkg=True)
        ocds_show_deref_schema = ocds_show_schema.get_schema_obj(deref=True)
        template = "cove_ocds/explore_record.html"
        if hasattr(json_data, "get") and hasattr(json_data.get("records"),
                                                 "__iter__"):
            context["records"] = json_data["records"]
        else:
            context["records"] = []
        if isinstance(json_data["records"],
                      list) and len(json_data["records"]) < 100:
            context["ocds_show_data"] = ocds_show_data(json_data,
                                                       ocds_show_deref_schema)
    else:
        ocds_show_schema = SchemaOCDS(record_pkg=False)
        ocds_show_deref_schema = ocds_show_schema.get_schema_obj(deref=True)
        template = "cove_ocds/explore_release.html"
        if hasattr(json_data, "get") and hasattr(json_data.get("releases"),
                                                 "__iter__"):
            context["releases"] = json_data["releases"]
            if (isinstance(json_data["releases"], list)
                    and len(json_data["releases"]) < 100):
                context["ocds_show_data"] = ocds_show_data(
                    json_data, ocds_show_deref_schema)

            # Parse release dates into objects so the template can format them.
            for release in context["releases"]:
                if hasattr(release, "get") and release.get("date"):
                    if validate_rfc3339(release["date"]):
                        release["date"] = parser.parse(release["date"])
                    else:
                        release["date"] = None
            if context.get("releases_aggregates"):
                date_fields = [
                    "max_award_date",
                    "max_contract_date",
                    "max_release_date",
                    "max_tender_date",
                    "min_award_date",
                    "min_contract_date",
                    "min_release_date",
                    "min_tender_date",
                ]
                for field in date_fields:
                    if context["releases_aggregates"].get(field):
                        if validate_rfc3339(
                                context["releases_aggregates"][field]):
                            context["releases_aggregates"][
                                field] = parser.parse(
                                    context["releases_aggregates"][field])
                        else:
                            context["releases_aggregates"][field] = None
        else:
            context["releases"] = []

    return render(request, template, context)
Example #10
def explore_bods(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    lib_cove_bods_config = LibCoveBODSConfig()
    lib_cove_bods_config.config['root_list_path'] = settings.COVE_CONFIG[
        'root_list_path']
    lib_cove_bods_config.config['root_id'] = settings.COVE_CONFIG['root_id']
    lib_cove_bods_config.config['id_name'] = settings.COVE_CONFIG['id_name']
    lib_cove_bods_config.config['root_is_list'] = settings.COVE_CONFIG[
        'root_is_list']
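    # Bound the birth-year range used by the additional person birthdate
    # checks: no earlier than 120 years ago and no later than the current year.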
    lib_cove_bods_config.config[
        'bods_additional_checks_person_birthdate_max_year'] = datetime.datetime.now(
        ).year
    lib_cove_bods_config.config['bods_additional_checks_person_birthdate_min_year'] = \
        datetime.datetime.now().year - 120

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry, we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        _(
                            format_html(
                                'We think you tried to upload a JSON file, but it is not well formed JSON.'
                                '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                                '</span> <strong>Error message:</strong> {}',
                                err)),
                        'error':
                        format(err)
                    })

            if not isinstance(json_data, list):
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry, we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        _('BODS JSON should have a list as the top level, the JSON you supplied does not.'
                          ),
                    })

        schema_bods = SchemaBODS(json_data=json_data,
                                 lib_cove_bods_config=lib_cove_bods_config)

        context.update(
            convert_json(upload_dir,
                         upload_url,
                         file_name,
                         lib_cove_bods_config,
                         schema_url=schema_bods.release_pkg_schema_url,
                         replace=True,
                         request=request,
                         flatten=True))

    else:

        schema_bods = SchemaBODS(lib_cove_bods_config=lib_cove_bods_config)
        context.update(
            convert_spreadsheet(upload_dir,
                                upload_url,
                                file_name,
                                file_type,
                                lib_cove_bods_config,
                                schema_url=schema_bods.release_pkg_schema_url))
        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

    context = common_checks_bods(context,
                                 upload_dir,
                                 json_data,
                                 schema_bods,
                                 lib_cove_bods_config=lib_cove_bods_config)

    if not db_data.rendered:
        db_data.rendered = True
    db_data.save()

    # Some extra info from the Schema
    context['schema_version_used'] = schema_bods.schema_version

    # We need to calculate some stats for showing in the view
    total_ownership_or_control_interest_statements = 0
    for key, count in context['statistics'][
            'count_ownership_or_control_statement_interest_statement_types'].items(
            ):
        total_ownership_or_control_interest_statements += count
    context['statistics'][
        'count_ownership_or_control_interest_statement'] = total_ownership_or_control_interest_statements  # noqa

    # The use of r_e_type is to stop flake8 complaining about line length
    r_e_type = 'registeredEntity'
    context['statistics'][
        'count_entities_registeredEntity_legalEntity_with_any_identifier'] = (
            context['statistics']
            ['count_entity_statements_types_with_any_identifier'][r_e_type] +
            context['statistics']
            ['count_entity_statements_types_with_any_identifier']
            ['legalEntity'])
    context['statistics'][
        'count_entities_registeredEntity_legalEntity_with_any_identifier_with_id_and_scheme'] = (
            context['statistics']
            ['count_entity_statements_types_with_any_identifier_with_id_and_scheme']
            [r_e_type] + context['statistics']
            ['count_entity_statements_types_with_any_identifier_with_id_and_scheme']
            ['legalEntity'])
    context['statistics']['count_entities_registeredEntity_legalEntity'] = (
        context['statistics']['count_entity_statements_types'][r_e_type] +
        context['statistics']['count_entity_statements_types']['legalEntity'])

    template = 'cove_bods/explore.html'

    return render(request, template, context)
Example #11
def explore_ocds(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    post_version_choice = request.POST.get('version')
    replace = False
    validation_errors_path = os.path.join(upload_dir,
                                          'validation_errors-3.json')

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry, we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        _('We think you tried to upload a JSON file, but it is not well formed JSON.'
                          '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                          '</span> <strong>Error message:</strong> {}'.format(
                              err)),
                        'error':
                        format(err)
                    })

            if not isinstance(json_data, dict):
                raise CoveInputDataError(
                    context={
                        'sub_title':
                        _("Sorry, we can't process that data"),
                        'link':
                        'index',
                        'link_text':
                        _('Try Again'),
                        'msg':
                        _('OCDS JSON should have an object as the top level, the JSON you supplied does not.'
                          ),
                    })

            version_in_data = json_data.get('version', '')
            db_data.data_schema_version = version_in_data
            select_version = post_version_choice or db_data.schema_version
            schema_ocds = SchemaOCDS(select_version=select_version,
                                     release_data=json_data)

            if schema_ocds.missing_package:
                exceptions.raise_missing_package_error()
            if schema_ocds.invalid_version_argument:
                # This shouldn't happen unless the user sends random POST data.
                exceptions.raise_invalid_version_argument(post_version_choice)
            if schema_ocds.invalid_version_data:
                if isinstance(version_in_data, str) and re.compile(
                        r'^\d+\.\d+\.\d+$').match(version_in_data):
                    exceptions.raise_invalid_version_data_with_patch(
                        version_in_data)
                else:
                    if not isinstance(version_in_data, str):
                        version_in_data = '{} (it must be a string)'.format(
                            str(version_in_data))
                    context['unrecognized_version_data'] = version_in_data

            if schema_ocds.version != db_data.schema_version:
                replace = True
            if schema_ocds.extensions:
                schema_ocds.create_extended_release_schema_file(
                    upload_dir, upload_url)
            url = schema_ocds.extended_schema_file or schema_ocds.release_schema_url

            if 'records' in json_data:
                context['conversion'] = None
            else:

                # Replace the spreadsheet conversion only if it exists already.
                converted_path = os.path.join(upload_dir, 'flattened')
                replace_converted = replace and os.path.exists(converted_path +
                                                               '.xlsx')
                context.update(
                    convert_json(upload_dir,
                                 upload_url,
                                 file_name,
                                 schema_url=url,
                                 replace=replace_converted,
                                 request=request,
                                 flatten=request.POST.get('flatten')))

    else:
        # Use the lowest release pkg schema version accepting 'version' field
        metatab_schema_url = SchemaOCDS(
            select_version='1.1').release_pkg_schema_url
        metatab_data = get_spreadsheet_meta_data(upload_dir, file_name,
                                                 metatab_schema_url, file_type)
        if 'version' not in metatab_data:
            metatab_data['version'] = '1.0'
        else:
            db_data.data_schema_version = metatab_data['version']

        select_version = post_version_choice or db_data.schema_version
        schema_ocds = SchemaOCDS(select_version=select_version,
                                 release_data=metatab_data)

        # Unlike the JSON data case above, do not check for a missing data package
        if schema_ocds.invalid_version_argument:
            # This shouldn't happen unless the user sends random POST data.
            exceptions.raise_invalid_version_argument(post_version_choice)
        if schema_ocds.invalid_version_data:
            version_in_data = metatab_data.get('version')
            if re.compile(r'^\d+\.\d+\.\d+$').match(version_in_data):
                exceptions.raise_invalid_version_data_with_patch(
                    version_in_data)
            else:
                context['unrecognized_version_data'] = version_in_data

        # Replace json conversion when user chooses a different schema version.
        if db_data.schema_version and schema_ocds.version != db_data.schema_version:
            replace = True

        if schema_ocds.extensions:
            schema_ocds.create_extended_release_schema_file(
                upload_dir, upload_url)
        url = schema_ocds.extended_schema_file or schema_ocds.release_schema_url
        pkg_url = schema_ocds.release_pkg_schema_url

        context.update(
            convert_spreadsheet(upload_dir,
                                upload_url,
                                file_name,
                                file_type,
                                schema_url=url,
                                pkg_schema_url=pkg_url,
                                replace=replace))

        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

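    # When the schema version changed, drop the cached validation errors so
    # they are recalculated against the newly selected version.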
    if replace:
        if os.path.exists(validation_errors_path):
            os.remove(validation_errors_path)

    context = common_checks_ocds(context, upload_dir, json_data, schema_ocds)

    if schema_ocds.json_deref_error:
        exceptions.raise_json_deref_error(schema_ocds.json_deref_error)

    context.update({
        'data_schema_version': db_data.data_schema_version,
        'first_render': not db_data.rendered
    })

    schema_version = getattr(schema_ocds, 'version', None)
    if schema_version:
        db_data.schema_version = schema_version
    if not db_data.rendered:
        db_data.rendered = True

    db_data.save()

    if 'records' in json_data:
        template = 'cove_ocds/explore_record.html'
        if hasattr(json_data, 'get') and hasattr(json_data.get('records'),
                                                 '__iter__'):
            context['records'] = json_data['records']
        else:
            context['records'] = []
    else:
        template = 'cove_ocds/explore_release.html'
        if hasattr(json_data, 'get') and hasattr(json_data.get('releases'),
                                                 '__iter__'):
            context['releases'] = json_data['releases']
        else:
            context['releases'] = []

    return render(request, template, context)
Example #12
def explore_ocds(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    lib_cove_ocds_config = LibCoveOCDSConfig()
    lib_cove_ocds_config.config['current_language'] = translation.get_language()

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    post_version_choice = request.POST.get('version')
    replace = False
    validation_errors_path = os.path.join(upload_dir, 'validation_errors-3.json')

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _(format_html('We think you tried to upload a JSON file, but it is not well formed JSON.'
                             '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                             '</span> <strong>Error message:</strong> {}', err)),
                    'error': format(err)
                })

            if not isinstance(json_data, dict):
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _('OCDS JSON should have an object as the top level, the JSON you supplied does not.'),
                })

            version_in_data = json_data.get('version', '')
            db_data.data_schema_version = version_in_data
            select_version = post_version_choice or db_data.schema_version
            schema_ocds = SchemaOCDS(select_version=select_version, release_data=json_data, lib_cove_ocds_config=lib_cove_ocds_config)

            if schema_ocds.missing_package:
                exceptions.raise_missing_package_error()
            if schema_ocds.invalid_version_argument:
                # This shouldn't happen unless the user sends random POST data.
                exceptions.raise_invalid_version_argument(post_version_choice)
            if schema_ocds.invalid_version_data:
                if isinstance(version_in_data, str) and re.compile(r'^\d+\.\d+\.\d+$').match(version_in_data):
                    exceptions.raise_invalid_version_data_with_patch(version_in_data)
                else:
                    if not isinstance(version_in_data, str):
                        version_in_data = '{} (it must be a string)'.format(str(version_in_data))
                    context['unrecognized_version_data'] = version_in_data

            if schema_ocds.version != db_data.schema_version:
                replace = True
            if schema_ocds.extensions:
                schema_ocds.create_extended_release_schema_file(upload_dir, upload_url)
            url = schema_ocds.extended_schema_file or schema_ocds.release_schema_url

            if 'records' in json_data:
                context['conversion'] = None
            else:

                # Replace the spreadsheet conversion only if it exists already.
                converted_path = os.path.join(upload_dir, 'flattened')
                replace_converted = replace and os.path.exists(converted_path + '.xlsx')
                context.update(convert_json(upload_dir, upload_url, file_name, lib_cove_ocds_config,
                                            schema_url=url, replace=replace_converted,
                                            request=request, flatten=request.POST.get('flatten')))

    else:
        # Use the lowest release pkg schema version accepting 'version' field
        metatab_schema_url = SchemaOCDS(select_version='1.1', lib_cove_ocds_config=lib_cove_ocds_config).release_pkg_schema_url
        metatab_data = get_spreadsheet_meta_data(upload_dir, file_name, metatab_schema_url, file_type)
        if 'version' not in metatab_data:
            metatab_data['version'] = '1.0'
        else:
            db_data.data_schema_version = metatab_data['version']

        select_version = post_version_choice or db_data.schema_version
        schema_ocds = SchemaOCDS(select_version=select_version, release_data=metatab_data, lib_cove_ocds_config=lib_cove_ocds_config)

        # Unlike the JSON data case above, do not check for a missing data package
        if schema_ocds.invalid_version_argument:
            # This shouldn't happen unless the user sends random POST data.
            exceptions.raise_invalid_version_argument(post_version_choice)
        if schema_ocds.invalid_version_data:
            version_in_data = metatab_data.get('version')
            if re.compile(r'^\d+\.\d+\.\d+$').match(version_in_data):
                exceptions.raise_invalid_version_data_with_patch(version_in_data)
            else:
                context['unrecognized_version_data'] = version_in_data

        # Replace json conversion when user chooses a different schema version.
        if db_data.schema_version and schema_ocds.version != db_data.schema_version:
            replace = True

        if schema_ocds.extensions:
            schema_ocds.create_extended_release_schema_file(upload_dir, upload_url)
        url = schema_ocds.extended_schema_file or schema_ocds.release_schema_url
        pkg_url = schema_ocds.release_pkg_schema_url

        context.update(convert_spreadsheet(upload_dir, upload_url, file_name, file_type, lib_cove_ocds_config,
                                           schema_url=url,
                                           pkg_schema_url=pkg_url, replace=replace))

        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

    if replace:
        if os.path.exists(validation_errors_path):
            os.remove(validation_errors_path)

    context = common_checks_ocds(context, upload_dir, json_data, schema_ocds)

    if schema_ocds.json_deref_error:
        exceptions.raise_json_deref_error(schema_ocds.json_deref_error)

    context.update({
        'data_schema_version': db_data.data_schema_version,
        'first_render': not db_data.rendered,
        'validation_errors_grouped': group_validation_errors(context['validation_errors']),
    })

    schema_version = getattr(schema_ocds, 'version', None)
    if schema_version:
        db_data.schema_version = schema_version
    if not db_data.rendered:
        db_data.rendered = True

    db_data.save()

    ocds_show_schema = SchemaOCDS()
    ocds_show_deref_schema = ocds_show_schema.get_release_schema_obj(deref=True)

    if 'records' in json_data:
        template = 'cove_ocds/explore_record.html'
        if hasattr(json_data, 'get') and hasattr(json_data.get('records'), '__iter__'):
            context['records'] = json_data['records']
        else:
            context['records'] = []
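        # ocds_show_data is only prepared for small packages (fewer than 100
        # records).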
        if isinstance(json_data['records'], list) and len(json_data['records']) < 100:
            context['ocds_show_data'] = ocds_show_data(json_data, ocds_show_deref_schema)
    else:
        template = 'cove_ocds/explore_release.html'
        if hasattr(json_data, 'get') and hasattr(json_data.get('releases'), '__iter__'):
            context['releases'] = json_data['releases']
            if isinstance(json_data['releases'], list) and len(json_data['releases']) < 100:
                context['ocds_show_data'] = ocds_show_data(json_data, ocds_show_deref_schema)

            # Parse release dates into objects so the template can format them.
            for release in context['releases']:
                if hasattr(release, 'get') and release.get('date'):
                    if validate_rfc3339(release['date']):
                        release['date'] = parser.parse(release['date'])
                    else:
                        release['date'] = None
            if context.get('releases_aggregates'):
                date_fields = ['max_award_date', 'max_contract_date', 'max_release_date', 'max_tender_date', 'min_award_date', 'min_contract_date', 'min_release_date', 'min_tender_date']
                for field in date_fields:
                    if context['releases_aggregates'].get(field):
                        if validate_rfc3339(context['releases_aggregates'][field]):
                            context['releases_aggregates'][field] = parser.parse(context['releases_aggregates'][field])
                        else:
                            context['releases_aggregates'][field] = None
        else:
            context['releases'] = []

    return render(request, template, context)
Example #13
def explore_bods(request, pk):
    context, db_data, error = explore_data_context(request, pk)
    if error:
        return error

    lib_cove_bods_config = LibCoveBODSConfig()
    lib_cove_bods_config.config['root_list_path'] = settings.COVE_CONFIG['root_list_path']
    lib_cove_bods_config.config['root_id'] = settings.COVE_CONFIG['root_id']
    lib_cove_bods_config.config['id_name'] = settings.COVE_CONFIG['id_name']
    lib_cove_bods_config.config['root_is_list'] = settings.COVE_CONFIG['root_is_list']

    upload_dir = db_data.upload_dir()
    upload_url = db_data.upload_url()
    file_name = db_data.original_file.file.name
    file_type = context['file_type']

    if file_type == 'json':
        # open the data first so we can inspect for record package
        with open(file_name, encoding='utf-8') as fp:
            try:
                json_data = json.load(fp, parse_float=Decimal)
            except ValueError as err:
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _(format_html('We think you tried to upload a JSON file, but it is not well formed JSON.'
                                         '\n\n<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true">'
                                         '</span> <strong>Error message:</strong> {}', err)),
                    'error': format(err)
                })

            if not isinstance(json_data, list):
                raise CoveInputDataError(context={
                    'sub_title': _("Sorry, we can't process that data"),
                    'link': 'index',
                    'link_text': _('Try Again'),
                    'msg': _('BODS JSON should have a list as the top level, the JSON you supplied does not.'),
                })

        schema_bods = SchemaBODS(lib_cove_bods_config=lib_cove_bods_config)

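        # Always (re)generate the spreadsheet conversion for JSON input
        # (replace=True, flatten=True).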
        context.update(convert_json(upload_dir, upload_url, file_name, lib_cove_bods_config,
                                    schema_url=schema_bods.release_pkg_schema_url, replace=True,
                                    request=request, flatten=True))

    else:

        schema_bods = SchemaBODS(lib_cove_bods_config=lib_cove_bods_config)
        context.update(convert_spreadsheet(upload_dir, upload_url, file_name, file_type, lib_cove_bods_config,
                                           schema_url=schema_bods.release_pkg_schema_url))
        with open(context['converted_path'], encoding='utf-8') as fp:
            json_data = json.load(fp, parse_float=Decimal)

    context = common_checks_bods(context, upload_dir, json_data, schema_bods)

    if not db_data.rendered:
        db_data.rendered = True
    db_data.save()

    template = 'cove_bods/explore.html'

    return render(request, template, context)