import json

from django.conf import settings
from django.http import HttpResponse
from elasticsearch import Elasticsearch

# get_title, params_to_url, parse_search, prepare_filters and APP_NAME_SYNONYMS
# are assumed to be project-local helpers imported from elsewhere in this app
# (their modules are not shown in this excerpt).


def generate_index_entry(args):
    """Build one elasticsearch document (title, url, and the raw params) from a
    tuple of (api_name, app_names, countries, trade_flow, years, product)."""

    index = {}

    # Generate title
    country_names = None
    if args[2] is not None:
        country_names = [c.name_en for c in args[2]]

    title = get_title(
        api_name=args[0],
        app_name=args[1],
        country_names=country_names,
        trade_flow=args[3],
        years=args[4],
        product_name=args[5].name_en if args[5] is not None else None
    )
    index["title"] = title

    # Generate url
    country_codes = None
    if args[2] is not None:
        country_codes = [c.name_3char.lower() for c in args[2]]

    url = params_to_url(
        api_name=args[0],
        app_name=args[1][0],
        country_codes=country_codes,
        trade_flow=args[3],
        years=args[4],
        product_code=args[5].code if args[5] is not None else None
    )
    index["url"] = url

    regions = None
    if args[2] is not None:
        regions = [c.region.name for c in args[2]]

    # Add the params into the elasticsearch document in case we need them later
    kwargs = dict(
        api_name=args[0],
        app_name=args[1],
        country_names=country_names,
        country_codes=country_codes,
        regions=regions,
        trade_flow=args[3],
        years=args[4],
        product_name=args[5].name_en if args[5] is not None else None,
        product_code=args[5].code if args[5] is not None else None,
        product_community=args[5].community.name if args[5] is not None else None,
    )
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    index.update(kwargs)

    return index
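
# A minimal sketch of how an indexer script might call generate_index_entry,
# assuming hypothetical Country/Product ORM objects that expose the attributes
# accessed above (name_en, name_3char, region.name, code, community.name):
#
#     entry = generate_index_entry((
#         "hs_product",              # args[0]: api_name
#         ["tree_map", "stacked"],   # args[1]: possible app names, first is the default
#         [chile],                   # args[2]: countries, or None
#         "export",                  # args[3]: trade_flow
#         [2012],                    # args[4]: years
#         copper,                    # args[5]: product, or None
#     ))
#     Elasticsearch().index(index="questions", body=entry)
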
def api_search(request):
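    """Suggest saved "questions" that match the user's search term.

    Parses the free-text "term" GET parameter into filters, runs a fuzzy
    elasticsearch query against the "questions" index, and returns an
    OpenSearch-style suggestion list as a JSON array.
    """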

    query = request.GET.get("term", None)
    if query is None:
        return HttpResponse("[]")

    # For the user experiment, run search version 1 or 2; version 2 is more
    # feature-rich and has parsed filters. See atlas-data#32
    search_version = int(request.GET.get("search_var", 0))

    # Parse search query
    query, query_type, kwargs = parse_search(
        query, strip_keywords=(search_version != 1))

    # Resolve any synonyms. feasibility -> pie_scatter etc.
    if "app_name" in kwargs:
        given_app_name = kwargs["app_name"][0]
        kwargs["app_name"] = [
            APP_NAME_SYNONYMS.get(given_app_name, given_app_name)
        ]

    # Viz params are not an elasticsearch filter so pop that off
    viz_params = kwargs.pop("viz_params", None)

    # Prepare elasticsearch filters
    if search_version == 2 or search_version == 0:
        filters = prepare_filters(kwargs)
    else:
        filters = {}

    es_query = {"query": {"filtered": {}}, "size": 8}

    # Add filters to the query if they were given. Filters are ANDed.
    if len(filters) > 0:
        es_filters = [{
            "terms": {
                k: [x.lower() for x in v]
            }
        } for k, v in filters.items()]
        es_filters = {"bool": {"must": es_filters}}
        es_query["query"]["filtered"]["filter"] = es_filters

    # Add fuzzy search for query string if any non-filter query string remains
    # after taking out the filters
    if query.strip() != "":
        es_query["query"]["filtered"]["query"] = {
            "fuzzy_like_this": {
                "like_text": query,
                "fields": ["title"],
                "max_query_terms": 15,
                "prefix_length": 3
            }
        }
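    # With both a filter and leftover free text, the assembled body ends up
    # shaped roughly like this (illustrative values only, using the same legacy
    # "filtered"/"fuzzy_like_this" syntax as above):
    #
    # {
    #     "size": 8,
    #     "query": {
    #         "filtered": {
    #             "filter": {"bool": {"must": [{"terms": {"app_name": ["tree_map"]}}]}},
    #             "query": {
    #                 "fuzzy_like_this": {
    #                     "like_text": "chile",
    #                     "fields": ["title"],
    #                     "max_query_terms": 15,
    #                     "prefix_length": 3
    #                 }
    #             }
    #         }
    #     }
    # }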

    # Do the query
    es = Elasticsearch()
    result = es.search(index="questions", body=es_query)

    # Format the results in a way that complies with the OpenSearch standard's
    # suggestion extension
    labels = []
    urls = []
    for x in result['hits']['hits']:
        data = x['_source']

        # Regenerate the title and url so we can modify them dynamically,
        # e.g. to add the year being searched for or to force a specific app.
        years = kwargs.get('years', None)

        # Possible apps this title could be visualized as
        app_names = data['app_name']

        # If the app the user requested is possible, use it. Otherwise, fall
        # back to the first one. App names in the elasticsearch index are
        # sorted so that the first entry is a sensible default; see the
        # indexer script.
        requested_app_name = filters.get("app_name", [None])[0]
        if requested_app_name in app_names:
            app_name = requested_app_name
        else:
            app_name = app_names[0]

        if years and len(years) == 2:
            if app_name in ["map", "tree_map"]:
                # If multiple years are specified and we can do a stacked
                # graph, do a stacked graph instead of a treemap or map
                app_name = "stacked"
            elif app_name in ["product_space", "pie_scatter"]:
                # Some apps can never have multiple years so just use the first
                # one specified
                years = [years[0]]

        # If no years specified, use default years
        if years is None:
            if app_name == "stacked":
                years = [settings.YEAR_MIN_HS4, settings.YEAR_MAX_HS4]
            else:
                years = [settings.YEAR_MAX_HS4]

        # You can't show a product space based on imports so ignore those
        if app_name == "product_space" and data["trade_flow"] == "import":
            continue

        title = get_title(api_name=data['api_name'],
                          app_name=app_name,
                          country_names=data.get('country_names', None),
                          trade_flow=data['trade_flow'],
                          years=years,
                          product_name=data.get('product_name', None))
        url = params_to_url(api_name=data['api_name'],
                            app_name=app_name,
                            country_codes=data.get('country_codes', None),
                            trade_flow=data['trade_flow'],
                            years=years,
                            product_code=data.get('product_code', None))

        if viz_params:
            if app_name == "pie_scatter":
                url += "?queryActivated=True"
                url += "&yaxis=%s" % viz_params[0]

        labels.append(title)
        urls.append(settings.HTTP_HOST + url)

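    # The response body follows the OpenSearch suggestions extension:
    # [original query, suggestion titles, descriptions (unused, hence []), result URLs]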
    return HttpResponse(json.dumps([query, labels, [], urls]))
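
# A hypothetical way to expose this view in the project's URL configuration
# (the route, import path, and Django version are assumptions, not taken from
# this excerpt):
#
#     from django.conf.urls import url
#
#     urlpatterns = [
#         url(r"^api/search/$", api_search, name="api_search"),
#     ]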