def get(self):
    """Serve the swagger specification, adjusted to the requesting client."""
    # NOTE: the swagger dictionary is read only once, at server init time
    spec = mem.customizer._definitions

    # NOTE: host/scheme are set dynamically, based on where the client lies
    from restapi.confs import PRODUCTION
    from flask import request
    from utilities.helpers import get_api_url

    base_url = get_api_url(request, PRODUCTION).rstrip('/')
    scheme, host = base_url.split('://')
    spec['host'] = host
    spec['schemes'] = [scheme]

    # Jsonify, so we skip custom response building
    return jsonify(spec)
def get(self, image_id=None):
    """
    Return a single image (by uuid) or the whole image list.

    Each record is serialized with provider/ownership relationships and
    decorated with content/thumbnail/summary download links.
    """
    logger.debug("getting NonAVEntity id: %s", image_id)
    self.graph = self.get_service_instance('neo4j')

    if image_id is None:
        entities = self.graph.NonAVEntity.nodes.all()
    else:
        # check if the image exists
        try:
            entity = self.graph.NonAVEntity.nodes.get(uuid=image_id)
        except self.graph.NonAVEntity.DoesNotExist:
            logger.debug(
                "NonAVEntity with uuid %s does not exist" % image_id)
            raise RestApiException(
                "Please specify a valid image id",
                status_code=hcodes.HTTP_BAD_NOTFOUND)
        entities = [entity]

    api_url = get_api_url(request, PRODUCTION)
    data = []
    for entity in entities:
        image = self.getJsonResponse(
            entity,
            max_relationship_depth=1,
            relationships_expansion=[
                'record_sources.provider',
                'item.ownership',
            ])
        item = entity.item.single()
        # all download links share the same base URL
        content_base = api_url + 'api/images/' + entity.uuid + '/content?type='
        image['links']['content'] = content_base + 'image'
        if item.thumbnail is not None:
            image['links']['thumbnail'] = content_base + 'thumbnail'
        image['links']['summary'] = content_base + 'summary'
        data.append(image)

    return self.force_response(data)
def get(self, shot_id=None):
    """
    Get shot by id.

    With ``?content=thumbnail`` the shot's thumbnail file is streamed back;
    otherwise a JSON representation with self/thumbnail links is returned.

    :param shot_id: uuid of the shot (required despite the default).
    :raises RestApiException: if shot_id is missing, the content parameter
        is not 'thumbnail', the shot does not exist, or no thumbnail is set.
    """
    logger.debug("getting Shot id: %s", shot_id)
    if shot_id is None:
        raise RestApiException("Please specify a valid shot uuid",
                               status_code=hcodes.HTTP_BAD_NOTFOUND)
    self.graph = self.get_service_instance('neo4j')

    input_parameters = self.get_input()
    content_type = input_parameters['content']
    # 'thumbnail' is the only downloadable content for a shot
    if content_type is not None and content_type != 'thumbnail':
        raise RestApiException("Bad type parameter: expected 'thumbnail'",
                               status_code=hcodes.HTTP_BAD_REQUEST)

    # check if the shot exists
    # (removed dead pre-assignment `node = None`: `node` is always bound
    # here or the exception path raises; also switched the two eager
    # %-formatted log calls to lazy args, consistent with the call above)
    try:
        node = self.graph.Shot.nodes.get(uuid=shot_id)
    except self.graph.Shot.DoesNotExist:
        logger.debug("Shot with id %s does not exist", shot_id)
        raise RestApiException("Please specify a valid shot id",
                               status_code=hcodes.HTTP_BAD_NOTFOUND)

    if content_type is not None:
        thumbnail_uri = node.thumbnail_uri
        logger.debug("thumbnail content uri: %s", thumbnail_uri)
        if thumbnail_uri is None:
            raise RestApiException("Thumbnail not found",
                                   status_code=hcodes.HTTP_BAD_NOTFOUND)
        return send_file(thumbnail_uri, mimetype='image/jpeg')

    api_url = get_api_url(request, PRODUCTION)
    shot = self.getJsonResponse(node)
    shot['links']['self'] = api_url + 'api/shots/' + node.uuid
    shot['links']['thumbnail'] = \
        api_url + 'api/shots/' + node.uuid + '?content=thumbnail'
    return self.force_response(shot)
def get(self, video_id=None):
    """
    Return a single video (by uuid) or the whole video list.

    Each record is serialized with provider/ownership/revision/version
    relationships and decorated with content/thumbnail/summary links.
    """
    log.debug("getting AVEntity id: %s", video_id)
    self.graph = self.get_service_instance('neo4j')

    if video_id is None:
        entities = self.graph.AVEntity.nodes.all()
    else:
        # check if the video exists
        try:
            entity = self.graph.AVEntity.nodes.get(uuid=video_id)
        except self.graph.AVEntity.DoesNotExist:
            log.debug("AVEntity with uuid %s does not exist" % video_id)
            raise RestApiException(
                "Please specify a valid video id",
                status_code=hcodes.HTTP_BAD_NOTFOUND)
        entities = [entity]

    api_url = get_api_url(request, PRODUCTION)
    data = []
    for entity in entities:
        video = self.getJsonResponse(
            entity,
            max_relationship_depth=1,
            relationships_expansion=[
                'record_sources.provider',
                'item.ownership',
                'item.revision',
                'item.other_version',
            ])
        item = entity.item.single()
        # all download links share the same base URL
        content_base = api_url + 'api/videos/' + entity.uuid + '/content?type='
        video['links']['content'] = content_base + 'video'
        if item.thumbnail is not None:
            video['links']['thumbnail'] = content_base + 'thumbnail'
        video['links']['summary'] = content_base + 'summary'
        data.append(video)

    return self.force_response(data)
def get(self, video_id):
    """
    Return all shots of a video, each enriched with its annotations.

    Annotations (manual notes plus FHG automatic tags found on embedded
    video segments) are prefetched with two bulk Cypher queries and then
    attached to the shots, instead of issuing per-shot queries.

    Private annotations are only returned to their creator.

    :param video_id: uuid of the AVEntity.
    :raises RestApiException: if video_id is missing or unknown.
    """
    log.info("get shots for AVEntity id: %s", video_id)
    if video_id is None:
        raise RestApiException("Please specify a video id",
                               status_code=hcodes.HTTP_BAD_REQUEST)
    self.graph = self.get_service_instance('neo4j')

    data = []
    video = None
    try:
        video = self.graph.AVEntity.nodes.get(uuid=video_id)
    except self.graph.AVEntity.DoesNotExist:
        log.debug("AVEntity with uuid %s does not exist" % video_id)
        raise RestApiException("Please specify a valid video id",
                               status_code=hcodes.HTTP_BAD_NOTFOUND)

    # user is needed to decide visibility of private annotations (may be None)
    user = self.get_user_if_logged()
    item = video.item.single()
    api_url = get_api_url(request, PRODUCTION)

    # shot uuid -> list of serialized annotations
    annotations = {}
    # bulk query: every manual annotation targeting a shot of this video,
    # with its (optional) creator and all of its bodies
    annotations_query = """
        MATCH (:AVEntity {uuid: '%s'})<-[:CREATION]-(:Item)-[:SHOT]->(shot:Shot)<-[:HAS_TARGET]-(anno:Annotation)-[:HAS_BODY]->(b:AnnotationBody)
        OPTIONAL MATCH (anno)-[:IS_ANNOTATED_BY]->(creator:User)
        RETURN shot.uuid, anno, creator, collect(b)
    """ % video_id
    log.info("Prefetching annotations...")
    result = self.graph.cypher(annotations_query)
    for row in result:
        shot_uuid = row[0]
        annotation = self.graph.Annotation.inflate(row[1])
        if row[2] is not None:
            creator = self.graph.User.inflate(row[2])
        else:
            creator = None
        # private annotations are visible to their creator only
        if annotation.private:
            if creator is None:
                log.warning(
                    'Invalid state: missing creator for private note '
                    '[UUID: %s]', annotation.uuid)
                continue
            if user is None:
                continue
            if creator.uuid != user.uuid:
                continue
        res = self.getJsonResponse(annotation, max_relationship_depth=0)
        del (res['links'])
        # attach creator
        if annotation.annotation_type in ('TAG', 'DSC', 'LNK'):
            if creator is not None:
                res['creator'] = self.getJsonResponse(
                    creator, max_relationship_depth=0)
        # attach bodies
        res['bodies'] = []
        for concept in row[3]:
            b = self.graph.AnnotationBody.inflate(concept)
            b = b.downcast()  # most derivative body
            res['bodies'].append(
                self.getJsonResponse(b, max_relationship_depth=0))
        if shot_uuid not in annotations:
            annotations[shot_uuid] = []
        annotations[shot_uuid].append(res)

    log.info("Prefetching automatic tags from embedded segments...")
    # bulk query: FHG-generated TAG annotations living on video segments
    # that overlap the shots of this video
    query_auto_tags = """
        MATCH (:AVEntity {uuid: '%s'})<-[:CREATION]-(:Item)-[:SHOT]->(shot:Shot)-[:WITHIN_SHOT]-(sgm:VideoSegment)
        MATCH (sgm)<-[:HAS_TARGET]-(anno:Annotation {annotation_type:'TAG', generator:'FHG'})-[:HAS_BODY]-(b:ODBody)-[:CONCEPT]-(res:ResourceBody)
        RETURN shot.uuid, anno, collect(res)
    """ % video_id
    result = self.graph.cypher(query_auto_tags)
    for row in result:
        shot_uuid = row[0]
        if shot_uuid not in annotations:
            annotations[shot_uuid] = []
        auto_anno = self.graph.Annotation.inflate(row[1])
        res = self.getJsonResponse(auto_anno, max_relationship_depth=0)
        del (res['links'])
        # attach bodies
        res['bodies'] = []
        for concept in row[2]:
            res['bodies'].append(
                self.getJsonResponse(
                    self.graph.ResourceBody.inflate(concept),
                    max_relationship_depth=0))
        annotations[shot_uuid].append(res)

    for s in item.shots.order_by('start_frame_idx'):
        shot = self.getJsonResponse(s)
        shot_url = api_url + 'api/shots/' + s.uuid
        shot['links']['self'] = shot_url
        shot['links']['thumbnail'] = shot_url + '?content=thumbnail'
        # get all shot annotations:
        # at the moment filter by vim and tag annotations
        # NOTE: the string below is the superseded per-shot implementation,
        # kept (as a no-op string statement) for reference; the prefetched
        # `annotations` dict above replaces it.
        """
        shot['annotations'] = []
        for anno in s.annotation.all():
            creator = anno.creator.single()
            if anno.private:
                if creator is None:
                    log.warn('Invalid state: missing creator for private '
                             'note [UUID:{}]'.format(anno.uuid))
                    continue
                if user is None or (creator is not None
                                    and creator.uuid != user.uuid):
                    continue
            res = self.getJsonResponse(anno, max_relationship_depth=0)
            del(res['links'])
            if (anno.annotation_type in ('TAG', 'DSC', 'LNK')
                    and creator is not None):
                res['creator'] = self.getJsonResponse(
                    anno.creator.single(), max_relationship_depth=0)
            # attach bodies
            res['bodies'] = []
            for b in anno.bodies.all():
                mdb = b.downcast()  # most derivative body
                res['bodies'].append(
                    self.getJsonResponse(mdb, max_relationship_depth=0))
            shot['annotations'].append(res)
        # add automatic tags from "embedded segments"
        query_auto_tags = "MATCH (s:Shot {{ uuid:'{shot_id}'}})-[:WITHIN_SHOT]-(sgm:VideoSegment) " \
            "MATCH (sgm)<-[:HAS_TARGET]-(anno:Annotation {{annotation_type:'TAG', generator:'FHG'}})-[:HAS_BODY]-(b:ODBody)-[:CONCEPT]-(res:ResourceBody) " \
            "RETURN anno, collect(res)".format(shot_id=s.uuid)
        result = self.graph.cypher(query_auto_tags)
        for row in result:
            auto_anno = self.graph.Annotation.inflate(row[0])
            res = self.getJsonResponse(auto_anno, max_relationship_depth=0)
            del(res['links'])
            # attach bodies
            res['bodies'] = []
            for concept in row[1]:
                res['bodies'].append(
                    self.getJsonResponse(
                        self.graph.ResourceBody.inflate(concept),
                        max_relationship_depth=0))
            shot['annotations'].append(res)
        """
        # Retrieving annotations from prefetched data
        shot['annotations'] = annotations.get(s.uuid, [])
        data.append(shot)
    return self.force_response(data)
def post(self):
    """
    Search annotations on a list of creations, restricted to relevant places.

    Expects input of the form
    ``{'relevant-list': [{'creation-id': <uuid>, 'place-ids': [<iri>, ...]}]}``
    and returns, for each creation, its metadata (with content/thumbnail
    links, year and title) plus the matching TAG annotations, each with its
    resource body, optional target shot and creator.

    :raises RestApiException: when 'relevant-list' is missing/empty, or an
        entry lacks 'creation-id' or a non-empty 'place-ids'.
    """
    self.initGraph()
    input_parameters = self.get_input()
    # at moment ONLY search for creations in a place list is available
    place_list = input_parameters.get('relevant-list')
    if place_list is None:
        raise RestApiException(
            'Only search for relevant place list allowed',
            status_code=hcodes.HTTP_BAD_REQUEST)
    if len(place_list) == 0:
        raise RestApiException(
            'Expected at least one relevant place list',
            status_code=hcodes.HTTP_BAD_REQUEST)

    data = []
    api_url = get_api_url(request, PRODUCTION)
    for item in place_list:
        creation_id = item.get('creation-id')
        if creation_id is None:
            raise RestApiException('Missing creation-id',
                                   status_code=hcodes.HTTP_BAD_REQUEST)
        place_ids = item.get('place-ids')
        if place_ids is None or len(place_ids) == 0:
            raise RestApiException('Missing place-ids',
                                   status_code=hcodes.HTTP_BAD_REQUEST)

        # Fetch the creation (titles, provider, reviser) together with its
        # TAG annotations whose resource body matches one of the places.
        query = "MATCH (n:Creation {{uuid:'{uuid}'}}) " \
            "MATCH (n)<-[:CREATION]-(i:Item)<-[:SOURCE]-(anno:Annotation {{annotation_type:'TAG'}})-[:HAS_BODY]-(body:ResourceBody) " \
            "WHERE body.iri IN {place_ids} " \
            "MATCH (n)-[:HAS_TITLE]->(title:Title) " \
            "MATCH (anno)-[:IS_ANNOTATED_BY]-(c:User) " \
            "MATCH (n)-[:RECORD_SOURCE]->(:RecordSource)-[:PROVIDED_BY]->(p:Provider) " \
            "OPTIONAL MATCH (anno)-[:HAS_TARGET]-(target:Shot) " \
            "OPTIONAL MATCH (i)-[:REVISION_BY]-(r:User) " \
            "WITH n, collect(distinct title) AS titles, p," \
            " i, anno, body, target AS shot, c{{.uuid, .name, .surname, .email}} AS creator, r{{.uuid, .name, .surname}} AS reviser " \
            "RETURN n{{.*, type:i.item_type, titles, provider:p.identifier, reviser:reviser}}, collect(anno{{.*, body, shot, creator}})".format(
                uuid=creation_id, place_ids=place_ids)
        logger.debug(query)
        result = self.graph.cypher(query)
        for row in result:
            creation_uuid = row[0]['uuid']
            creation_type = row[0]['type']
            creation = {
                'uuid': creation_uuid,
                'external_ids': row[0]['external_ids'],
                'rights_status': row[0]['rights_status'],
                'type': creation_type,
                'provider': row[0]['provider'],
                'reviser': row[0]['reviser'],
            }
            # add some useful links
            if creation_type == 'Video':
                creation['links'] = {}
                creation['links']['content'] = api_url + 'api/videos/' + \
                    creation_uuid + '/content?type=video'
                creation['links']['thumbnail'] = api_url + 'api/videos/' + \
                    creation_uuid + '/content?type=thumbnail'
            elif creation_type == 'Image':
                creation['links'] = {}
                # BUGFIX: was '/content?type=video'; the images endpoint
                # serves its content with type=image (see the images API)
                creation['links']['content'] = api_url + 'api/images/' + \
                    creation_uuid + '/content?type=image'
                creation['links']['thumbnail'] = api_url + 'api/images/' + \
                    creation_uuid + '/content?type=thumbnail'

            # PRODUCTION YEAR: get the first year in the array
            if 'production_years' in row[0]:
                creation['year'] = row[0]['production_years'][0]
            elif 'date_created' in row[0]:
                creation['year'] = row[0]['date_created'][0]

            # TITLE: prefer the English title, otherwise the last seen one
            if 'identifying_title' in row[0]:
                creation['title'] = row[0]['identifying_title']
            elif 'titles' in row[0] and len(row[0]['titles']) > 0:
                for t in row[0]['titles']:
                    title_node = self.graph.Title.inflate(t)
                    title = title_node.text
                    if title_node.language is not None \
                            and title_node.language == 'en':
                        break
                creation['title'] = title

            annotations = []
            for col in row[1]:
                anno = {
                    'uuid': col['uuid'],
                    'creation_datetime': col['creation_datetime'],
                    'annotation_type': col['annotation_type'],
                    'creator': col['creator']
                }
                body = self.graph.ResourceBody.inflate(col['body'])
                anno['body'] = {
                    'iri': body.iri,
                    'name': body.name,
                    'spatial': body.spatial
                }
                if col['shot'] is not None:
                    shot = self.graph.Shot.inflate(col['shot'])
                    anno['shot'] = {
                        'uuid': shot.uuid,
                        'duration': shot.duration,
                        'shot_num': shot.shot_num,
                        'start_frame_idx': shot.start_frame_idx,
                        'end_frame_idx': shot.end_frame_idx,
                        'timestamp': shot.timestamp
                    }
                annotations.append(anno)
            res = {
                'source': creation,
                'annotations': annotations
            }
            data.append(res)
    return self.force_response(data)
def post(self):
    """
    Paged search over creations (videos and/or images).

    Builds a Cypher query from the request's 'filter' block (type, provider,
    city, country, IPR status, production-year range, terms) plus an
    optional full-text 'match' on titles/descriptions/keywords, then returns
    the page of results together with meta counters: total items, items per
    provider (when no provider filter was given) and items per decade.

    :raises RestApiException: on negative paging values, unknown item type,
        invalid country or IPR status codes.
    """
    self.graph = self.get_service_instance('neo4j')
    input_parameters = self.get_input()
    offset, limit = self.get_paging()
    offset -= 1  # paging is 1-based in the request, 0-based in the query
    logger.debug("paging: offset {0}, limit {1}".format(offset, limit))
    if offset < 0:
        raise RestApiException('Page number cannot be a negative value',
                               status_code=hcodes.HTTP_BAD_REQUEST)
    if limit < 0:
        raise RestApiException('Page size cannot be a negative value',
                               status_code=hcodes.HTTP_BAD_REQUEST)

    # check request for term matching
    provider = None
    # TODO: no longer used, to be removed
    # (multi_match_query was once assembled from per-field regex MATCH
    # clauses on title/description/keyword/contributor; that code path was
    # superseded by the full-text index query below and has been removed.
    # The empty string keeps the {match} placeholders in the queries valid.)
    multi_match_query = ''

    # check request for filtering
    filters = []
    # add filter for processed content with COMPLETE status
    filters.append(
        "MATCH (n)<-[:CREATION]-(:Item)-[:CONTENT_SOURCE]->(content:ContentStage) "
        + "WHERE content.status = 'COMPLETED'")
    entity = 'Creation'
    filtering = input_parameters.get('filter')
    if filtering is not None:
        # check item type: selects the node label searched below
        item_type = filtering.get('type', 'all')
        if item_type is None:
            item_type = 'all'
        else:
            item_type = item_type.strip().lower()
        if item_type not in self.__class__.allowed_item_types:
            raise RestApiException(
                "Bad item type parameter: expected one of %s" %
                (self.__class__.allowed_item_types, ),
                status_code=hcodes.HTTP_BAD_REQUEST)
        if item_type == 'all':
            entity = 'Creation'
        elif item_type == 'video':
            entity = 'AVEntity'
        elif item_type == 'image':
            entity = 'NonAVEntity'
        else:
            # should never be reached
            raise RestApiException('Unexpected item type',
                                   status_code=hcodes.HTTP_SERVER_ERROR)

        # PROVIDER: only recorded (it drives the countByProviders meta
        # below); the provider MATCH filter itself is disabled
        provider = filtering.get('provider')
        logger.info("provider {0}".format(provider))
        #if provider is not None:
        #    filters.append(
        #        "MATCH (n)-[:RECORD_SOURCE]->(:RecordSource)-[:PROVIDED_BY]->(p:Provider)" +
        #        " WHERE p.identifier='{provider}'".format(provider=provider.strip()))

        # CITY
        city = filtering.get('city')
        if city is not None:
            filters.append(
                "MATCH (n)-[:RECORD_SOURCE]->(:RecordSource)-[:PROVIDED_BY]->(p:Provider)"
                + " WHERE p.city='{city}'".format(city=city.strip()))
        logger.info("city {0}".format(city))

        # COUNTRY
        country = filtering.get('country')
        if country is not None:
            country = country.strip().upper()
            if codelists.fromCode(country, codelists.COUNTRY) is None:
                raise RestApiException('Invalid country code for: ' + country)
            filters.append(
                "MATCH (n)-[:COUNTRY_OF_REFERENCE]->(c:Country) WHERE c.code='{country_ref}'"
                .format(country_ref=country))

        # IPR STATUS
        iprstatus = filtering.get('iprstatus')
        if iprstatus is not None:
            iprstatus = iprstatus.strip()
            if codelists.fromCode(iprstatus, codelists.RIGHTS_STATUS) is None:
                raise RestApiException(
                    'Invalid IPR status code for: ' + iprstatus)
            filters.append(
                "MATCH (n) WHERE n.rights_status = '{iprstatus}'".format(
                    iprstatus=iprstatus))

        # PRODUCTION YEAR RANGE
        missingDate = filtering.get('missingDate')
        # logger.debug("missingDate: {0}".format(missingDate))
        if not missingDate:
            year_from = filtering.get('yearfrom')
            year_to = filtering.get('yearto')
            if year_from is not None or year_to is not None:
                # set defaults if year is missing
                year_from = '1890' if year_from is None else str(year_from)
                year_to = '1999' if year_to is None else str(year_to)
                # FIXME: this DO NOT work with image
                # videos carry production_years; images carry date_created
                date_clauses = []
                if item_type == 'video' or item_type == 'all':
                    date_clauses.append(
                        "ANY(item in n.production_years where item >= '{yfrom}') "
                        "and ANY(item in n.production_years where item <= '{yto}')"
                        .format(yfrom=year_from, yto=year_to))
                if item_type == 'image' or item_type == 'all':
                    date_clauses.append(
                        "ANY(item in n.date_created where substring(item, 0, 4) >= '{yfrom}') "
                        "and ANY(item in n.date_created where substring(item, 0 , 4) <= '{yto}')"
                        .format(yfrom=year_from, yto=year_to))
                filters.append("MATCH (n) WHERE {clauses}".format(
                    clauses=' or '.join(date_clauses)))

        # TERMS: vocabulary terms (matched by iri) or free labels
        terms = filtering.get('terms')
        if terms:
            term_clauses = []
            iris = [
                term['iri'] for term in terms
                if 'iri' in term and term['iri'] is not None
            ]
            if iris:
                term_clauses.append('body.iri IN {iris}'.format(iris=iris))
            # NOTE(review): the condition parses as
            # ('iri' not in term) or (term['iri'] is None and 'label' in term),
            # so a term without 'iri' AND without 'label' raises KeyError —
            # likely ('iri' missing or None) and 'label' present was meant;
            # confirm against callers before changing.
            free_terms = [
                term['label'] for term in terms
                if 'iri' not in term or term['iri'] is None
                and 'label' in term
            ]
            if free_terms:
                term_clauses.append('body.value IN {free_terms}'.format(
                    free_terms=free_terms))
            if term_clauses:
                filters.append(
                    "MATCH (n)<-[:CREATION]-(i:Item)<-[:SOURCE]-(tag:Annotation {{annotation_type:'TAG'}})-[:HAS_BODY]-(body) "
                    "WHERE {clauses}".format(
                        clauses=' or '.join(term_clauses)))

    # full-text term matching via the "titles" full-text index
    match = input_parameters.get('match')
    fulltext = None
    if match is not None:
        term = match.get('term')
        if term is not None:
            term = self.graph.sanitize_input(term)
            term = self.graph.fuzzy_tokenize(term)
            fulltext = """
                CALL db.index.fulltext.queryNodes("titles", '{term}') YIELD node, score
                WITH node, score
                MATCH (n:{entity})-[:HAS_TITLE|HAS_DESCRIPTION|HAS_KEYWORD]->(node)
            """.format(term=term, entity=entity)
            # RETURN node, n, score

    # first request to get the number of elements to be returned
    if fulltext is not None:
        countv = "{fulltext} {filters} RETURN COUNT(DISTINCT(n))".format(
            fulltext=fulltext, filters=' '.join(filters))
        query = "{fulltext} {filters} " \
            "RETURN DISTINCT(n) SKIP {offset} LIMIT {limit}".format(
                fulltext=fulltext, filters=' '.join(filters),
                offset=offset * limit, limit=limit)
    else:
        countv = "MATCH (n:{entity})" \
            " {filters} " \
            " {match} " \
            " RETURN COUNT(DISTINCT(n))".format(
                entity=entity, filters=' '.join(filters),
                match=multi_match_query)
        query = "MATCH (n:{entity})" \
            " {filters} " \
            " {match} " \
            "RETURN DISTINCT(n) SKIP {offset} LIMIT {limit}".format(
                entity=entity, filters=' '.join(filters),
                match=multi_match_query,
                offset=offset * limit, limit=limit)

    # logger.debug("QUERY to get number of elements: {0}".format(countv))
    # get total number of elements
    numels = [row[0] for row in self.graph.cypher(countv)][0]
    logger.debug("Number of elements retrieved: {0}".format(numels))

    # logger.debug(query)
    data = []
    result = self.graph.cypher(query)
    api_url = get_api_url(request, PRODUCTION)
    for row in result:
        if 'AVEntity' in row[0].labels:
            v = self.graph.AVEntity.inflate(row[0])
        elif 'NonAVEntity' in row[0].labels:
            v = self.graph.NonAVEntity.inflate(row[0])
        else:
            # should never be reached
            raise RestApiException('Unexpected item type',
                                   status_code=hcodes.HTTP_SERVER_ERROR)
        item = v.item.single()
        if isinstance(v, self.graph.AVEntity):
            # video
            video_url = api_url + 'api/videos/' + v.uuid
            video = self.getJsonResponse(
                v, max_relationship_depth=1,
                relationships_expansion=[
                    'record_sources.provider',
                    'item.ownership',
                    'item.revision'
                ])
            logger.debug("video links %s" % video['links'])
            video['links']['self'] = video_url
            video['links']['content'] = video_url + '/content?type=video'
            if item.thumbnail is not None:
                video['links']['thumbnail'] = video_url + \
                    '/content?type=thumbnail'
            video['links']['summary'] = video_url + '/content?type=summary'
            data.append(video)
        elif isinstance(v, self.graph.NonAVEntity):
            # image
            image_url = api_url + 'api/images/' + v.uuid
            image = self.getJsonResponse(
                v, max_relationship_depth=1,
                relationships_expansion=[
                    'record_sources.provider',
                    'item.ownership',
                    # 'titles.creation',
                    # 'keywords.creation',
                    # 'descriptions.creation',
                ])
            logger.debug("image links %s" % image['links'])
            image['links']['self'] = image_url
            image['links']['content'] = image_url + '/content?type=image'
            if item.thumbnail is not None:
                image['links']['thumbnail'] = image_url + \
                    '/content?type=thumbnail'
            image['links']['summary'] = image_url + '/content?type=summary'
            data.append(image)

    # return also the total number of elements
    meta_response = {"totalItems": numels}

    # count result by provider if provider == null
    if provider is None:
        count_by_provider_query = "MATCH (n:{entity})" \
            " {filters} " \
            " {match} " \
            "MATCH (n)-[:RECORD_SOURCE]->(r:RecordSource)-[:PROVIDED_BY]->(p:Provider) " \
            "WITH distinct p, count(distinct n) as numberOfCreations " \
            "RETURN p.identifier, numberOfCreations".format(
                entity=entity, filters=' '.join(filters),
                match=multi_match_query)
        # logger.debug(count_by_provider_query)
        result_p_count = self.graph.cypher(count_by_provider_query)
        group_by_providers = {}
        for row in result_p_count:
            group_by_providers[row[0]] = row[1]
        # logger.debug(group_by_providers)
        meta_response["countByProviders"] = group_by_providers

    # count result by year (grouped per decade: first 3 digits + '0')
    count_by_year_query = "MATCH (n:{entity})" \
        " {filters} " \
        " {match} " \
        "WITH distinct n WITH collect(substring(head(n.production_years), 0, 3)) + collect(substring(head(n.date_created), 0, 3)) as years " \
        "UNWIND years as row " \
        "RETURN row + '0' as decade, count(row) as count order by decade".format(
            entity=entity, filters=' '.join(filters),
            match=multi_match_query)
    # logger.debug(count_by_year_query)
    result_y_count = self.graph.cypher(count_by_year_query)
    group_by_years = {}
    for row in result_y_count:
        group_by_years[row[0]] = row[1]
    meta_response["countByYears"] = group_by_years

    return self.force_response(data, meta=meta_response)