def __init__(self, date):
    """Parse *date* into a FlexiDate and expose normalized year/month/day.

    A month or day of '0' or '' is normalized to '1' so the value can be
    treated as a concrete calendar date; all three parts stay None when
    the date cannot be parsed at all.
    """
    self.orig_date = date
    self.fd = FlexiDate.from_str(str(self.orig_date))
    self.year = None
    self.month = None
    self.day = None
    if self.fd is not None:
        self.year = self.fd.year
        # Bug fix: the original compared `self.fd.month is ''` — identity
        # comparison against a string literal is not equality (and raises
        # SyntaxWarning on modern CPython). Use membership instead.
        self.month = '1' if self.fd.month in ('0', '') else self.fd.month
        self.day = '1' if self.fd.day in ('0', '') else self.fd.day
def dotest(fd):
    """Round-trip check: serializing then re-parsing a FlexiDate is stable."""
    reparsed = FlexiDate.from_str(str(fd))
    assert str(reparsed) == str(fd)
def build_search_results_dsl(request):
    """Build an Elasticsearch Query from the search request's GET parameters.

    Combines term/concept/string filters, an optional buffered spatial
    filter, and an optional temporal filter (including concept-defined
    min/max year ranges looked up in the database) into one Bool query.

    Returns the fully-assembled Query (not yet executed).
    """
    term_filter = request.GET.get('termFilter', '')
    spatial_filter = JSONDeserializer().deserialize(request.GET.get('mapFilter', '{}'))
    export = request.GET.get('export', None)
    page = 1 if request.GET.get('page') == '' else int(request.GET.get('page', 1))
    temporal_filter = JSONDeserializer().deserialize(request.GET.get('temporalFilter', '{}'))
    se = SearchEngineFactory().create()

    # Exports page through results with a larger batch size.
    # Fix: `export != None` -> `is not None` (identity test for None).
    if export is not None:
        limit = settings.SEARCH_EXPORT_ITEMS_PER_PAGE
    else:
        limit = settings.SEARCH_ITEMS_PER_PAGE

    query = Query(se, start=limit * int(page - 1), limit=limit)
    query.add_aggregation(GeoHashGridAgg(field='points', name='grid', precision=settings.HEX_BIN_PRECISION))
    query.add_aggregation(GeoBoundsAgg(field='points', name='bounds'))
    search_query = Bool()

    if term_filter != '':
        for term in JSONDeserializer().deserialize(term_filter):
            if term['type'] == 'term':
                # Renamed from `term_filter` to stop shadowing the GET
                # parameter still referenced by this loop's iterable.
                phrase_filter = Match(field='strings', query=term['value'], type='phrase')
                if term['inverted']:
                    search_query.must_not(phrase_filter)
                else:
                    search_query.must(phrase_filter)
            elif term['type'] == 'concept':
                concept_ids = _get_child_concepts(term['value'])
                conceptid_filter = Terms(field='domains.conceptid', terms=concept_ids)
                if term['inverted']:
                    search_query.must_not(conceptid_filter)
                else:
                    search_query.must(conceptid_filter)
            elif term['type'] == 'string':
                string_filter = Bool()
                string_filter.should(Match(field='strings', query=term['value'], type='phrase_prefix'))
                string_filter.should(Match(field='strings.folded', query=term['value'], type='phrase_prefix'))
                if term['inverted']:
                    search_query.must_not(string_filter)
                else:
                    search_query.must(string_filter)

    if 'features' in spatial_filter:
        if len(spatial_filter['features']) > 0:
            feature_geom = spatial_filter['features'][0]['geometry']
            feature_properties = spatial_filter['features'][0]['properties']
            # Renamed from `buffer` to avoid shadowing the builtin.
            geom_buffer = {'width': 0, 'unit': 'ft'}
            if 'buffer' in feature_properties:
                geom_buffer = feature_properties['buffer']
            feature_geom = JSONDeserializer().deserialize(
                _buffer(feature_geom, geom_buffer['width'], geom_buffer['unit']).json)
            geoshape = GeoShape(field='geometries.features.geometry',
                                type=feature_geom['type'],
                                coordinates=feature_geom['coordinates'])
            invert_spatial_search = False
            if 'inverted' in feature_properties:
                invert_spatial_search = feature_properties['inverted']
            # `== True` kept deliberately: only a JSON boolean true inverts
            # the search (truthy non-booleans would change behavior).
            if invert_spatial_search == True:
                search_query.must_not(geoshape)
            else:
                search_query.must(geoshape)

    if 'fromDate' in temporal_filter and 'toDate' in temporal_filter:
        start_date = None
        end_date = None
        start_year = 'null'
        end_year = 'null'
        try:
            # Convert flexible date to epoch milliseconds
            # (31556952 s = mean Gregorian year).
            sd = FlexiDate.from_str(temporal_filter['fromDate'])
            start_date = int((sd.as_float() - 1970) * 31556952 * 1000)
            start_year = sd.year
        except Exception:
            # Best effort: an unparseable date leaves the bound open.
            pass
        try:
            ed = FlexiDate.from_str(temporal_filter['toDate'])
            end_date = int((ed.as_float() - 1970) * 31556952 * 1000)
            end_year = ed.year
        except Exception:
            pass

        # Add a filter for concepts that define min or max dates.
        sql = None
        basesql = """
        SELECT value.conceptid
        FROM (
            SELECT {select_clause}, v.conceptid
            FROM public."values" v, public."values" v2
            WHERE v.conceptid = v2.conceptid
            and v.valuetype = 'min_year'
            and v2.valuetype = 'max_year'
        ) as value
        WHERE overlap = true;
        """

        temporal_query = Bool()

        if 'inverted' not in temporal_filter:
            temporal_filter['inverted'] = False

        if temporal_filter['inverted']:
            # Inverted date searches need an OR clause and are generally more
            # complicated to structure (can't use ES must_not):
            # eg: less than START_DATE OR greater than END_DATE
            select_clause = []
            inverted_date_filter = Bool()
            field = 'dates'
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                field = 'tiles.data.%s' % (temporal_filter['dateNodeId'])
            if start_date is not None:
                inverted_date_filter.should(Range(field=field, lte=start_date))
                select_clause.append("(numrange(v.value::int, v2.value::int, '[]') && numrange(null,{start_year},'[]'))")
            if end_date is not None:
                inverted_date_filter.should(Range(field=field, gte=end_date))
                select_clause.append("(numrange(v.value::int, v2.value::int, '[]') && numrange({end_year},null,'[]'))")
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                date_range_query = Nested(path='tiles', query=inverted_date_filter)
                temporal_query.should(date_range_query)
            else:
                temporal_query.should(inverted_date_filter)
            select_clause = " or ".join(select_clause) + " as overlap"
            # NOTE(review): years are interpolated via str.format; they come
            # from FlexiDate parsing ('null' or digits), but a parameterized
            # query would be safer — confirm upstream validation.
            sql = basesql.format(select_clause=select_clause).format(start_year=start_year, end_year=end_year)
        else:
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                # Renamed from `range` to avoid shadowing the builtin.
                date_range = Range(field='tiles.data.%s' % (temporal_filter['dateNodeId']), gte=start_date, lte=end_date)
                date_range_query = Nested(path='tiles', query=date_range)
                temporal_query.should(date_range_query)
            else:
                date_range_query = Range(field='dates', gte=start_date, lte=end_date)
                temporal_query.should(date_range_query)
                select_clause = """
                    numrange(v.value::int, v2.value::int, '[]') && numrange({start_year},{end_year},'[]') as overlap
                """
                # sql only built when a dateNodeId is not specified
                sql = basesql.format(select_clause=select_clause).format(start_year=start_year, end_year=end_year)

        if sql is not None:
            cursor = connection.cursor()
            cursor.execute(sql)
            ret = [str(row[0]) for row in cursor.fetchall()]
            if len(ret) > 0:
                conceptid_filter = Terms(field='domains.conceptid', terms=ret)
                temporal_query.should(conceptid_filter)

        search_query.must(temporal_query)

    query.add_query(search_query)
    return query
def dotest2(fd):
    """Parsing a non-date string yields a FlexiDate whose str() is 'None'."""
    result = FlexiDate.from_str("Not a date")
    assert str(result) == 'None'
def append_to_document(self, document, nodevalue):
    """Append *nodevalue* to document['dates'] as epoch milliseconds."""
    # 31556952 s = mean Gregorian year; offset from the 1970 epoch.
    millis = int((FlexiDate.from_str(nodevalue).as_float() - 1970) * 31556952 * 1000)
    document['dates'].append(millis)
def append_to_document(self, document, nodevalue):
    """Convert *nodevalue* to epoch milliseconds and record it under 'dates'."""
    parsed = FlexiDate.from_str(nodevalue)
    # Years since 1970 scaled by the mean Gregorian year length, in ms.
    epoch_ms = int((parsed.as_float() - 1970) * 31556952 * 1000)
    document['dates'].append(epoch_ms)
def build_search_results_dsl(request):
    """Build an Elasticsearch Query from the search request's GET parameters.

    Combines term/concept/string filters, an optional buffered spatial
    filter, and an optional temporal filter (including concept-defined
    min/max year ranges looked up in the database) into one Bool query.

    Returns the fully-assembled Query (not yet executed).
    """
    term_filter = request.GET.get('termFilter', '')
    spatial_filter = JSONDeserializer().deserialize(request.GET.get('mapFilter', '{}'))
    export = request.GET.get('export', None)
    page = 1 if request.GET.get('page') == '' else int(request.GET.get('page', 1))
    temporal_filter = JSONDeserializer().deserialize(request.GET.get('temporalFilter', '{}'))
    se = SearchEngineFactory().create()

    # Exports page through results with a larger batch size.
    # Fix: `export != None` -> `is not None` (identity test for None).
    if export is not None:
        limit = settings.SEARCH_EXPORT_ITEMS_PER_PAGE
    else:
        limit = settings.SEARCH_ITEMS_PER_PAGE

    query = Query(se, start=limit * int(page - 1), limit=limit)
    query.add_aggregation(GeoHashGridAgg(field='points', name='grid', precision=settings.HEX_BIN_PRECISION))
    query.add_aggregation(GeoBoundsAgg(field='points', name='bounds'))
    search_query = Bool()

    if term_filter != '':
        for term in JSONDeserializer().deserialize(term_filter):
            if term['type'] == 'term':
                # Renamed from `term_filter` to stop shadowing the GET
                # parameter still referenced by this loop's iterable.
                phrase_filter = Match(field='strings', query=term['value'], type='phrase')
                if term['inverted']:
                    search_query.must_not(phrase_filter)
                else:
                    search_query.must(phrase_filter)
            elif term['type'] == 'concept':
                concept_ids = _get_child_concepts(term['value'])
                conceptid_filter = Terms(field='domains.conceptid', terms=concept_ids)
                if term['inverted']:
                    search_query.must_not(conceptid_filter)
                else:
                    search_query.must(conceptid_filter)
            elif term['type'] == 'string':
                string_filter = Bool()
                string_filter.should(Match(field='strings', query=term['value'], type='phrase_prefix'))
                string_filter.should(Match(field='strings.folded', query=term['value'], type='phrase_prefix'))
                if term['inverted']:
                    search_query.must_not(string_filter)
                else:
                    search_query.must(string_filter)

    if 'features' in spatial_filter:
        if len(spatial_filter['features']) > 0:
            feature_geom = spatial_filter['features'][0]['geometry']
            feature_properties = spatial_filter['features'][0]['properties']
            # Renamed from `buffer` to avoid shadowing the builtin.
            geom_buffer = {'width': 0, 'unit': 'ft'}
            if 'buffer' in feature_properties:
                geom_buffer = feature_properties['buffer']
            feature_geom = JSONDeserializer().deserialize(
                _buffer(feature_geom, geom_buffer['width'], geom_buffer['unit']).json)
            geoshape = GeoShape(field='geometries.features.geometry',
                                type=feature_geom['type'],
                                coordinates=feature_geom['coordinates'])
            invert_spatial_search = False
            if 'inverted' in feature_properties:
                invert_spatial_search = feature_properties['inverted']
            # `== True` kept deliberately: only a JSON boolean true inverts
            # the search (truthy non-booleans would change behavior).
            if invert_spatial_search == True:
                search_query.must_not(geoshape)
            else:
                search_query.must(geoshape)

    if 'fromDate' in temporal_filter and 'toDate' in temporal_filter:
        start_date = None
        end_date = None
        start_year = 'null'
        end_year = 'null'
        try:
            # Convert flexible date to epoch milliseconds
            # (31556952 s = mean Gregorian year).
            sd = FlexiDate.from_str(temporal_filter['fromDate'])
            start_date = int((sd.as_float() - 1970) * 31556952 * 1000)
            start_year = sd.year
        except Exception:
            # Best effort: an unparseable date leaves the bound open.
            pass
        try:
            ed = FlexiDate.from_str(temporal_filter['toDate'])
            end_date = int((ed.as_float() - 1970) * 31556952 * 1000)
            end_year = ed.year
        except Exception:
            pass

        # Add a filter for concepts that define min or max dates.
        sql = None
        basesql = """
        SELECT value.conceptid
        FROM (
            SELECT {select_clause}, v.conceptid
            FROM public."values" v, public."values" v2
            WHERE v.conceptid = v2.conceptid
            and v.valuetype = 'min_year'
            and v2.valuetype = 'max_year'
        ) as value
        WHERE overlap = true;
        """

        temporal_query = Bool()

        if 'inverted' not in temporal_filter:
            temporal_filter['inverted'] = False

        if temporal_filter['inverted']:
            # Inverted date searches need an OR clause and are generally more
            # complicated to structure (can't use ES must_not):
            # eg: less than START_DATE OR greater than END_DATE
            select_clause = []
            inverted_date_filter = Bool()
            field = 'dates'
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                field = 'tiles.data.%s' % (temporal_filter['dateNodeId'])
            if start_date is not None:
                inverted_date_filter.should(Range(field=field, lte=start_date))
                select_clause.append("(numrange(v.value::int, v2.value::int, '[]') && numrange(null,{start_year},'[]'))")
            if end_date is not None:
                inverted_date_filter.should(Range(field=field, gte=end_date))
                select_clause.append("(numrange(v.value::int, v2.value::int, '[]') && numrange({end_year},null,'[]'))")
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                date_range_query = Nested(path='tiles', query=inverted_date_filter)
                temporal_query.should(date_range_query)
            else:
                temporal_query.should(inverted_date_filter)
            select_clause = " or ".join(select_clause) + " as overlap"
            # NOTE(review): years are interpolated via str.format; they come
            # from FlexiDate parsing ('null' or digits), but a parameterized
            # query would be safer — confirm upstream validation.
            sql = basesql.format(select_clause=select_clause).format(start_year=start_year, end_year=end_year)
        else:
            if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
                # Renamed from `range` to avoid shadowing the builtin.
                date_range = Range(field='tiles.data.%s' % (temporal_filter['dateNodeId']), gte=start_date, lte=end_date)
                date_range_query = Nested(path='tiles', query=date_range)
                temporal_query.should(date_range_query)
            else:
                date_range_query = Range(field='dates', gte=start_date, lte=end_date)
                temporal_query.should(date_range_query)
                select_clause = """
                    numrange(v.value::int, v2.value::int, '[]') && numrange({start_year},{end_year},'[]') as overlap
                """
                # sql only built when a dateNodeId is not specified
                sql = basesql.format(select_clause=select_clause).format(start_year=start_year, end_year=end_year)

        if sql is not None:
            cursor = connection.cursor()
            cursor.execute(sql)
            ret = [str(row[0]) for row in cursor.fetchall()]
            if len(ret) > 0:
                conceptid_filter = Terms(field='domains.conceptid', terms=ret)
                temporal_query.should(conceptid_filter)

        search_query.must(temporal_query)

    query.add_query(search_query)
    return query