def make_geojson(self, record_index, total_found):
    """Return this record expressed as a GeoJSON Feature dict.

    :param record_index: position of this record in the result set
    :param total_found: total number of records found by the query
    """
    feature = LastUpdatedOrderedDict()
    feature['id'] = f'#record-{record_index}-of-{total_found}'
    feature['label'] = self.label
    feature['rdfs:isDefinedBy'] = self.uri
    feature['type'] = 'Feature'
    feature['category'] = 'oc-api:geo-record'
    geom = LastUpdatedOrderedDict()
    geom['id'] = f'#record-geom-{record_index}-of-{total_found}'
    geom['type'] = self.geo_feature_type
    geom['coordinates'] = self.geometry_coords
    feature['geometry'] = geom
    if (self.early_date is not None) and (self.late_date is not None):
        # We have dates, so describe the "when" of this feature.
        when_obj = LastUpdatedOrderedDict()
        when_obj['id'] = f'#record-event-{record_index}-of-{total_found}'
        when_obj['type'] = 'oc-gen:formation-use-life'
        # Convert numeric years to GeoJSON-LD ISO 8601 strings.
        when_obj['start'] = ISOyears().make_iso_from_float(self.early_date)
        when_obj['stop'] = ISOyears().make_iso_from_float(self.late_date)
        feature['when'] = when_obj
    # Finally, attach the properties dict for this record.
    feature['properties'] = self.make_client_properties_dict(
        id_value=f'#rec-{record_index}-of-{total_found}',
        feature_type='item record',
    )
    return feature
def add_when_json(self, act_dict, uuid, item_type, event):
    """Adds when (time interval or instant) data to a JSON-LD dict.

    :param act_dict: dict that receives the 'when' object
    :param uuid: uuid of the item being serialized
    :param item_type: item type (kept for interface compatibility; unused here)
    :param event: event model object providing dates and metadata
    :return: act_dict with a 'when' key added
    """
    when = LastUpdatedOrderedDict()
    when['id'] = '#event-when-' + str(event.event_id)
    # Bug fix: the original assigned when['type'] twice (first from
    # event.when_type, then immediately overwritten by event.meta_type),
    # so only the meta_type assignment ever took effect; keep just it.
    when['type'] = event.meta_type
    # NOTE: the original also compared event.earliest / event.latest to
    # start / stop, but both branches were no-ops (only commented-out
    # code and `pass`), so they are removed.
    # convert numeric to GeoJSON-LD ISO 8601
    when['start'] = ISOyears().make_iso_from_float(event.start)
    when['stop'] = ISOyears().make_iso_from_float(event.stop)
    if event.uuid != uuid:
        # we're inheriting / inferring event metadata from a parent context
        when['reference-type'] = 'inferred'
        when['reference-uri'] = URImanagement.make_oc_uri(
            event.uuid, 'subjects', self.cannonical_uris)
        rel_meta = self.item_gen_cache.get_entity(event.uuid)
        if rel_meta is not False:
            when['reference-label'] = rel_meta.label
    else:
        # metadata is specified for this specific item
        when['reference-type'] = 'specified'
        when['reference-label'] = self.manifest.label
    if self.assertion_hashes:
        when['hash_id'] = event.hash_id
    act_dict['when'] = when
    return act_dict
def process_solr_tiles(self, solr_tiles):
    """Processes solr_json discovery geo tiles into GeoJSON region facets.

    Aggregates tile counts to a certain depth, then builds one GeoJSON
    Feature per aggregated tile and appends it to self.geojson_regions.
    """
    # First aggregate counts for tiles that belong together.
    aggregate_tiles = self.aggregate_spatial_tiles(solr_tiles)
    # Now generate GeoJSON for each aggregated tile region.
    for region_num, (tile_key, tile_count) in enumerate(
            aggregate_tiles.items(), start=1):
        links = FilterLinks()
        links.base_request_json = self.filter_request_dict_json
        links.spatial_context = self.spatial_context
        tile_rparams = links.add_to_request('disc-geotile', tile_key)
        feature = LastUpdatedOrderedDict()
        feature['id'] = links.make_request_url(tile_rparams)
        feature['json'] = links.make_request_url(tile_rparams, '.json')
        feature['count'] = tile_count
        feature['type'] = 'Feature'
        feature['category'] = 'oc-api:geo-facet'
        if self.min_date is not False and self.max_date is not False:
            # Add chronology information for the query result set.
            when_obj = LastUpdatedOrderedDict()
            when_obj['id'] = '#event-' + tile_key
            when_obj['type'] = 'oc-gen:formation-use-life'
            # Convert numeric years to GeoJSON-LD ISO 8601.
            when_obj['start'] = ISOyears().make_iso_from_float(self.min_date)
            when_obj['stop'] = ISOyears().make_iso_from_float(self.max_date)
            feature['when'] = when_obj
        mercator = GlobalMercator()
        geom = LastUpdatedOrderedDict()
        geom['id'] = '#geo-disc-tile-geom-' + tile_key
        geom['type'] = 'Polygon'
        geom['coordinates'] = mercator.quadtree_to_geojson_poly_coords(tile_key)
        feature['geometry'] = geom
        props = LastUpdatedOrderedDict()
        props['id'] = '#geo-disc-tile-' + tile_key
        props['href'] = feature['id']
        props['label'] = 'Discovery region ({})'.format(region_num)
        props['feature-type'] = 'discovery region (facet)'
        props['count'] = tile_count
        props['early bce/ce'] = self.min_date
        props['late bce/ce'] = self.max_date
        feature['properties'] = props
        # Skip tiles with bad coordinates (off the 0,0 coast of Africa);
        # don't display items without real coordinates.
        if not tile_key.startswith('211111'):
            self.geojson_regions.append(feature)
def get_geo_event_metadata(self):
    """Gets geospatial and event (chronology) metadata for the item.

    Populates self.geo_meta and self.event_meta, and, when event data
    exists, sets self.temporal to an ISO 8601 year or 'start/stop'
    year-interval string. For 'projects' items still lacking temporal
    data, falls back to querying Solr for dc-terms:temporal metadata.
    """
    if self.is_valid:
        act_contain = Containment()
        if self.manifest.item_type == 'subjects':
            # For subjects, look up geo / event data on the item and on
            # its parent contexts. The call below populates
            # act_contain.contexts_list; its return value is unused.
            parents = act_contain.get_parents_by_child_uuid(
                self.manifest.uuid)
            subject_list = act_contain.contexts_list
            subject_list.insert(0, self.manifest.uuid)
            self.geo_meta = act_contain.get_geochron_from_subject_list(
                subject_list, 'geo')
            self.event_meta = act_contain.get_geochron_from_subject_list(
                subject_list, 'event')
        else:
            # Non-subject items get geo / event data via related records.
            self.geo_meta = act_contain.get_related_geochron(
                self.manifest.uuid, self.manifest.item_type, 'geo')
            self.event_meta = act_contain.get_related_geochron(
                self.manifest.uuid, self.manifest.item_type, 'event')
        if self.event_meta is not False and self.event_meta is not None:
            start = None
            stop = None
            for event in self.event_meta:
                if start is None:
                    start = event.start
                if stop is None:
                    stop = event.stop
                # NOTE(review): these comparisons keep the LATEST
                # event.start and the EARLIEST event.stop (an
                # intersection, not a union, of the event ranges). If
                # the overall min/max range was intended, the operators
                # look inverted — confirm before changing.
                if start < event.start:
                    start = event.start
                if stop > event.stop:
                    stop = event.stop
            if stop is None:
                stop = start
            if start is not None:
                if stop < start:
                    # Swap so start is always the earlier year.
                    stop_temp = start
                    start = stop
                    stop = stop_temp
                # we have a start year, so make a temporal value in
                # ISO 8601 format
                self.temporal = ISOyears().make_iso_from_float(start)
                if stop != start:
                    # stop year different from start, so add a / sep and
                    # the stop year in ISO 8601 format
                    self.temporal += '/' + ISOyears().make_iso_from_float(
                        stop)
        if self.temporal is None and self.manifest.item_type == 'projects':
            # get project temporal metadata via solr
            # now query Solr for temporal data
            cq = CompleteQuery()
            payload = {'proj': self.manifest.slug}
            ass_metadata = cq.get_json_query(payload, None)
            if 'dc-terms:temporal' in ass_metadata:
                self.temporal = ass_metadata['dc-terms:temporal']
def get_period_numeric_year(self, period, start_stop='start'):
    """Gets a year from a period dict, if it exists, translating the
    ISO 8601 value into a numeric BCE / CE year.

    :param period: period dict (PeriodO-style structure)
    :param start_stop: which terminus to read ('start' or 'stop')
    :return: numeric year (float) or False when no year is found
    """
    if start_stop not in period:
        return False
    act_dict = period[start_stop]
    if 'in' in act_dict:
        # The year is nested inside an 'in' object.
        act_dict = act_dict['in']
    if 'year' not in act_dict:
        return False
    return ISOyears().make_float_from_iso(act_dict['year'])
def _add_when_object_to_feature_option(self, id_suffix, option): """Adds a when object to a feature option""" # Add some general chronology information to the # geospatial tile. if (self.min_date is None or self.max_date is None): return option when = LastUpdatedOrderedDict() when['id'] = '#event-{}'.format(id_suffix) when['type'] = 'oc-gen:formation-use-life' # convert numeric to GeoJSON-LD ISO 8601 when['start'] = ISOyears().make_iso_from_float(self.min_date) when['stop'] = ISOyears().make_iso_from_float(self.max_date) option['when'] = when return option
def _process_chrono(self): """ Finds chronological / date ranges in GeoJSON features for indexing. More than 1 date range per item is OK. """ self.chrono_specified = False if 'features' in self.oc_item.json_ld: for feature in self.oc_item.json_ld['features']: bad_time = False try: # time is in ISO 8601 time iso_start = feature['when']['start'] except KeyError: bad_time = True try: # time is in ISO 8601 time iso_stop = feature['when']['stop'] except KeyError: bad_time = True try: when_type = feature['when']['type'] except KeyError: when_type = False try: ref_type = feature['when']['reference-type'] if ref_type == 'specified': self.chrono_specified = True except KeyError: ref_type = False if when_type == 'oc-gen:formation-use-life' \ and bad_time is False: # convert GeoJSON-LD ISO 8601 to numeric start = ISOyears().make_float_from_iso(iso_start) stop = ISOyears().make_float_from_iso(iso_stop) chrono_tile = ChronoTile() if 'form_use_life_chrono_tile' not in self.fields: self.fields['form_use_life_chrono_tile'] = [] if 'form_use_life_chrono_earliest' not in self.fields: self.fields['form_use_life_chrono_earliest'] = [] if 'form_use_life_chrono_latest' not in self.fields: self.fields['form_use_life_chrono_latest'] = [] self.fields['form_use_life_chrono_tile'].append( chrono_tile.encode_path_from_bce_ce( start, stop, '10M-' ) ) self.fields['form_use_life_chrono_earliest'].append(start) self.fields['form_use_life_chrono_latest'].append(stop)
def make_date_range(self, period):
    """Makes a human-readable BCE/CE date-range label for a period.

    :param period: period dict passed to self.get_period_numeric_year
    :return: a 'START - STOP' string, a single year string when only
        one terminus is known, or False when neither is known.

    Bug fix: the original unconditionally did `output += ' - ' + ...`
    for the stop year, which raised TypeError (False + str) whenever
    the start year was missing but a stop year existed. Now each
    terminus is handled independently. ISOyears is also instantiated
    lazily, only when a year actually needs formatting.
    """
    output = False
    start_date = self.get_period_numeric_year(period, 'start')
    if isinstance(start_date, float):
        # Truncate to a whole year before adding the BCE/CE suffix.
        output = ISOyears().bce_ce_suffix(int(start_date))
    end_date = self.get_period_numeric_year(period, 'stop')
    if isinstance(end_date, float):
        end_label = ISOyears().bce_ce_suffix(int(end_date))
        if output is False:
            # Only a stop year is known; use it alone instead of
            # crashing as the original code did.
            output = end_label
        else:
            output += ' - ' + end_label
    return output
def get_geo_event_metadata(self):
    """Gets geospatial and event (chronology) metadata for the item.

    Only 'subjects' items get geo / event lookups here. When event
    data exists, sets self.temporal to an ISO 8601 year or
    'start/stop' year-interval string.
    """
    if self.is_valid:
        act_contain = Containment()
        if self.manifest.item_type == 'subjects':
            # The call below populates act_contain.contexts_list;
            # its return value is unused.
            parents = act_contain.get_parents_by_child_uuid(self.manifest.uuid)
            subject_list = act_contain.contexts_list
            subject_list.insert(0, self.manifest.uuid)
            self.geo_meta = act_contain.get_geochron_from_subject_list(subject_list, 'geo')
            self.event_meta = act_contain.get_geochron_from_subject_list(subject_list, 'event')
        if self.event_meta is not False and self.event_meta is not None:
            start = None
            stop = None
            for event in self.event_meta:
                if start is None:
                    start = event.start
                if stop is None:
                    stop = event.stop
                # NOTE(review): these comparisons keep the LATEST
                # event.start and the EARLIEST event.stop (an
                # intersection, not a union, of the event ranges). If
                # the overall min/max range was intended, the operators
                # look inverted — confirm before changing.
                if start < event.start:
                    start = event.start
                if stop > event.stop:
                    stop = event.stop
            if stop is None:
                stop = start
            if start is not None:
                if stop < start:
                    # Swap so start is always the earlier year.
                    stop_temp = start
                    start = stop
                    stop = stop_temp
                # we have a start year, so make a temporal value in
                # ISO 8601 format
                self.temporal = ISOyears().make_iso_from_float(start)
                if stop != start:
                    # stop year different from start, so add a / sep and
                    # the stop year in ISO 8601 format
                    self.temporal += '/' + ISOyears().make_iso_from_float(stop)
def add_form_use_life_date_range(self, solr_json, iso_year_format=True):
    """Adds earliest and latest form-use-life dates to the result.

    :param solr_json: raw solr response dict
    :param iso_year_format: if True, output the 'start' / 'stop' values
        as ISO 8601 year strings; otherwise keep them numeric.

    Also sets self.min_date / self.max_date (numeric BCE/CE) for the
    query result range when any dates are present.
    """
    meta_configs = [
        (
            # Earliest date items formed, used, or alive.
            'start',
            (configs.STATS_FIELDS_PATH_KEYS + [
                'form_use_life_chrono_earliest',
                'min',
            ]),
        ),
        (
            # Latest date items formed, used, or alive.
            'stop',
            (configs.STATS_FIELDS_PATH_KEYS + [
                'form_use_life_chrono_latest',
                'max',
            ]),
        ),
    ]
    all_dates = []
    for json_ld_key, path_keys_list in meta_configs:
        act_date = utilities.get_dict_path_value(path_keys_list, solr_json)
        if act_date is None:
            # We don't have a date for this.
            continue
        # Keep the raw numeric value for the min/max computation below.
        all_dates.append(act_date)
        if iso_year_format:
            # Cleanup: the original re-checked `act_date is not None`
            # here, which is unreachable after the `continue` above.
            act_date = ISOyears().make_iso_from_float(act_date)
        self.result[json_ld_key] = act_date
    if not all_dates:
        # We don't have dates, so skip out.
        return None
    # Set the query result minimum and maximum date range.
    self.min_date = min(all_dates)
    self.max_date = max(all_dates)
def process_solr_tiles(self, solr_tiles):
    """Processes solr_json chronology tiles into chronology facets.

    Aggregates tile counts to self.aggregation_depth, tracks the
    overall min/max date range on self, then emits one facet record
    per aggregated tile (sorted earliest-first, longest-span-first)
    into self.chrono_tiles.
    """
    # first aggregate counts for tiles that belong together
    aggregate_tiles = LastUpdatedOrderedDict()
    i = -1
    t = 0
    if len(solr_tiles) <= 10:
        # don't aggregate if there's not much to aggregate
        self.aggregation_depth = self.max_depth
    # Solr returns a flat [value, count, value, count, ...] list;
    # iterate the values, with i indexing the matching count.
    for tile_key in solr_tiles[::2]:
        t += 1
        i += 2
        solr_facet_count = solr_tiles[i]
        if tile_key != 'false':
            if self.limiting_tile is False:
                ok_to_add = True
            else:
                # constrain to show facets ONLY within
                # the current queried tile
                if self.limiting_tile in tile_key:
                    ok_to_add = True
                else:
                    ok_to_add = False
            if ok_to_add:
                # first get full date range for
                # facets that are OK to add
                chrono_t = ChronoTile()
                dates = chrono_t.decode_path_dates(tile_key)
                if isinstance(dates, dict):
                    # Widen the result set's overall date range.
                    if self.min_date is False:
                        self.min_date = dates['earliest_bce']
                        self.max_date = dates['latest_bce']
                    else:
                        if self.min_date > dates['earliest_bce']:
                            self.min_date = dates['earliest_bce']
                        if self.max_date < dates['latest_bce']:
                            self.max_date = dates['latest_bce']
                # now aggregate the OK to use facets
                trim_tile_key = tile_key[:self.aggregation_depth]
                if trim_tile_key not in aggregate_tiles:
                    aggregate_tiles[trim_tile_key] = 0
                aggregate_tiles[trim_tile_key] += solr_facet_count
    # now generate GeoJSON for each tile region
    # print('Chronology tiles: ' + str(t) + ' reduced to ' + str(len(aggregate_tiles)))
    # --------------------------------------------
    # code to sort the list of tiles by start date and time span
    # --------------------------------------------
    sorting_ranges = []
    for tile_key, aggregate_count in aggregate_tiles.items():
        chrono_t = ChronoTile()
        dates = chrono_t.decode_path_dates(tile_key)
        dates['tile_key'] = tile_key
        sorting_ranges.append(dates)
    # now sort by earliest bce, then reversed latest bce;
    # this puts early dates with longest timespans first
    sorted_ranges = sorted(sorting_ranges,
                           key=lambda k: (k['earliest_bce'], -k['latest_bce']))
    sorted_tiles = LastUpdatedOrderedDict()
    for sort_range in sorted_ranges:
        tile_key = sort_range['tile_key']
        sorted_tiles[tile_key] = aggregate_tiles[tile_key]
    i = 0
    for tile_key, aggregate_count in sorted_tiles.items():
        # NOTE(review): i is incremented but never used in this loop.
        i += 1
        fl = FilterLinks()
        fl.base_request_json = self.filter_request_dict_json
        fl.spatial_context = self.spatial_context
        new_rparams = fl.add_to_request('form-chronotile', tile_key)
        record = LastUpdatedOrderedDict()
        record['id'] = fl.make_request_url(new_rparams)
        record['json'] = fl.make_request_url(new_rparams, '.json')
        record['count'] = aggregate_count
        record['category'] = 'oc-api:chrono-facet'
        chrono_t = ChronoTile()
        dates = chrono_t.decode_path_dates(tile_key)
        # convert numeric to GeoJSON-LD ISO 8601
        record['start'] = ISOyears().make_iso_from_float(
            dates['earliest_bce'])
        record['stop'] = ISOyears().make_iso_from_float(
            dates['latest_bce'])
        properties = LastUpdatedOrderedDict()
        properties['early bce/ce'] = dates['earliest_bce']
        properties['late bce/ce'] = dates['latest_bce']
        record['properties'] = properties
        self.chrono_tiles.append(record)
def process_geo(self):
    """Processes solr project pivot facets into GeoJSON project features.

    Builds one GeoJSON (Point) Feature per project, positioned at the
    count-weighted mean of the project's geo tile pivot facets, and
    appends it to self.geojson_projects.
    """
    if isinstance(self.geo_pivot, list):
        i = 0
        for proj in self.geo_pivot:
            i += 1
            add_feature = True
            # Solr project values are '___'-delimited:
            # slug ___ ? ___ uri-part ___ label (as used below).
            project_key = proj['value']
            proj_ex = project_key.split('___')
            slug = proj_ex[0]
            uri = self.make_url_from_val_string(proj_ex[2])
            href = self.make_url_from_val_string(proj_ex[2], False)
            label = proj_ex[3]
            fl = FilterLinks()
            fl.base_request_json = self.filter_request_dict_json
            fl.spatial_context = self.spatial_context
            new_rparams = fl.add_to_request('proj', slug)
            if 'response' in new_rparams:
                # Drop response-format params from the facet link.
                new_rparams.pop('response', None)
            record = LastUpdatedOrderedDict()
            record['id'] = fl.make_request_url(new_rparams)
            record['json'] = fl.make_request_url(new_rparams, '.json')
            record['count'] = proj['count']
            record['type'] = 'Feature'
            record['category'] = 'oc-api:geo-project'
            min_date = False
            max_date = False
            if project_key in self.projects:
                min_date = self.projects[project_key]['min_date']
                max_date = self.projects[project_key]['max_date']
            if min_date is not False \
                    and max_date is not False:
                # We have project dates, so add a 'when' object.
                when = LastUpdatedOrderedDict()
                when['id'] = '#event-' + slug
                when['type'] = 'oc-gen:formation-use-life'
                # convert numeric to GeoJSON-LD ISO 8601
                when['start'] = ISOyears().make_iso_from_float(
                    self.projects[project_key]['min_date'])
                when['stop'] = ISOyears().make_iso_from_float(
                    self.projects[project_key]['max_date'])
                record['when'] = when
            if 'pivot' not in proj:
                # No geo tile pivots: we cannot position this project,
                # so don't output a feature for it.
                add_feature = False
            else:
                geometry = LastUpdatedOrderedDict()
                geometry['id'] = '#geo-geom-' + slug
                geometry['type'] = 'Point'
                pivot_count_total = 0
                total_lon = 0
                total_lat = 0
                for geo_data in proj['pivot']:
                    pivot_count_total += geo_data['count']
                    gm = GlobalMercator()
                    bounds = gm.quadtree_to_lat_lon(geo_data['value'])
                    # Tile center point (bounds: lat_min, lon_min,
                    # lat_max, lon_max ordering as indexed here).
                    mean_lon = (bounds[1] + bounds[3]) / 2
                    mean_lat = (bounds[0] + bounds[2]) / 2
                    total_lon += mean_lon * geo_data['count']
                    total_lat += mean_lat * geo_data['count']
                # Count-weighted mean position across all pivot tiles.
                # NOTE(review): assumes proj['pivot'] is non-empty,
                # otherwise this divides by zero — confirm upstream.
                weighted_mean_lon = total_lon / pivot_count_total
                weighted_mean_lat = total_lat / pivot_count_total
                geometry['coordinates'] = [
                    weighted_mean_lon,
                    weighted_mean_lat
                ]
                record['geometry'] = geometry
            # now make a link to search records for this project
            fl = FilterLinks()
            fl.spatial_context = self.spatial_context
            new_rparams = fl.add_to_request('proj', slug)
            search_link = fl.make_request_url(new_rparams)
            properties = LastUpdatedOrderedDict()
            properties['id'] = '#geo-proj-' + slug
            properties['uri'] = uri
            properties['href'] = href
            properties['search'] = search_link
            properties['label'] = label
            # NOTE(review): trailing space in 'project ' preserved from
            # the original output — clients may depend on it.
            properties['feature-type'] = 'project '
            properties['count'] = proj['count']
            properties['early bce/ce'] = min_date
            properties['late bce/ce'] = max_date
            record['properties'] = properties
            if add_feature:
                self.geojson_projects.append(record)
def process_solr_polygons(self, solr_polygons):
    """Processes solr polygon facets into GeoJSON contained-in features.

    Only runs when the response zoom scope is detailed enough to make
    complex polygon features worthwhile. Appends one GeoJSON Feature
    per matched polygon to self.geojson_features.
    """
    if self.response_zoom_scope >= self.polygon_min_zoom_scope:
        # we're at a zoom level small enough to make it
        # worthwhile to return complex contained-in polygon features
        self.get_polygon_db_objects(solr_polygons)
        i = 0
        cnt_i = -1
        # Solr returns a flat [value, count, value, count, ...] list;
        # iterate the values, with cnt_i indexing the matching count.
        for poly_key in solr_polygons[::2]:
            cnt_i += 2
            solr_facet_count = solr_polygons[cnt_i]
            parsed_key = self.parse_solr_value_parts(poly_key)
            # print('Key: ' + str(parsed_key))
            uuid = parsed_key['uuid']
            if isinstance(uuid, str):
                if uuid in self.subjects_objs \
                        and uuid in self.geo_objs:
                    # we have Subjects and Geospatial models for this
                    # uuid
                    subj_obj = self.subjects_objs[uuid]
                    geo_obj = self.geo_objs[uuid]
                    i += 1
                    fl = FilterLinks()
                    fl.base_request_json = self.filter_request_dict_json
                    fl.spatial_context = self.spatial_context
                    new_rparams = fl.add_to_request(
                        'path', subj_obj.context)
                    record = LastUpdatedOrderedDict()
                    record['id'] = fl.make_request_url(new_rparams)
                    record['json'] = fl.make_request_url(
                        new_rparams, '.json')
                    record['count'] = solr_facet_count
                    record['type'] = 'Feature'
                    record['category'] = 'oc-api:geo-contained-in-feature'
                    if self.min_date is not False \
                            and self.max_date is not False:
                        # Add the result set's chronology range.
                        when = LastUpdatedOrderedDict()
                        when['id'] = '#event-feature-' + uuid
                        when['type'] = 'oc-gen:formation-use-life'
                        # convert numeric to GeoJSON-LD ISO 8601
                        when['start'] = ISOyears().make_iso_from_float(
                            self.min_date)
                        when['stop'] = ISOyears().make_iso_from_float(
                            self.max_date)
                        record['when'] = when
                    geometry = LastUpdatedOrderedDict()
                    geometry['id'] = '#geo-disc-feature-geom-' + uuid
                    geometry['type'] = geo_obj.ftype
                    coord_obj = json.loads(geo_obj.coordinates)
                    # Normalize polygon ring winding direction for
                    # valid GeoJSON output.
                    v_geojson = ValidateGeoJson()
                    coord_obj = v_geojson.fix_geometry_rings_dir(
                        geo_obj.ftype, coord_obj)
                    geometry['coordinates'] = coord_obj
                    record['geometry'] = geometry
                    properties = LastUpdatedOrderedDict()
                    properties['id'] = '#geo-disc-feature-' + uuid
                    properties['href'] = record['id']
                    properties['item-href'] = parsed_key['href']
                    properties['label'] = subj_obj.context
                    properties['feature-type'] = 'containing-region'
                    properties['count'] = solr_facet_count
                    properties['early bce/ce'] = self.min_date
                    properties['late bce/ce'] = self.max_date
                    record['properties'] = properties
                    # NOTE(review): geojson_obj is never used afterward;
                    # the dumps/loads round-trip may act as a validity
                    # check (geojson.loads raises on malformed input) —
                    # confirm intent before removing.
                    dump = json.dumps(record,
                                      ensure_ascii=False,
                                      indent=4)
                    geojson_obj = geojson.loads(dump)
                    self.geojson_features.append(record)
def process_solr_recs(self, solr_recs):
    """Processes solr document records into GeoJSON Feature records.

    Records with point coordinates are appended to self.geojson_recs;
    records without coordinates are routed to self.non_geo_recs.
    """
    # i keeps a running record number across paged requests.
    i = self.rec_start
    for solr_rec in solr_recs:
        i += 1
        record = LastUpdatedOrderedDict()
        rec_props_obj = RecordProperties(self.response_dict_json)
        rec_props_obj.entities = self.entities
        rec_props_obj.min_date = self.min_date
        rec_props_obj.max_date = self.max_date
        rec_props_obj.highlighting = self.highlighting
        rec_props_obj.flatten_rec_attributes = self.flatten_rec_attributes
        rec_props_obj.rec_attributes = self.rec_attributes
        rec_props_obj.parse_solr_record(solr_rec)
        # add to existing list of entities, reduce lookups
        self.entities = rec_props_obj.entities
        record['id'] = '#record-' + str(i) + '-of-' + str(self.total_found)
        if rec_props_obj.label is False:
            # No label parsed; fall back to a positional label.
            record['label'] = 'Record ' + str(i) + ' of ' + str(self.total_found)
        else:
            record['label'] = rec_props_obj.label
        if rec_props_obj.uri is not False:
            record['rdfs:isDefinedBy'] = rec_props_obj.uri
        if rec_props_obj.latitude is not False \
                and rec_props_obj.longitude is not False:
            # We have point coordinates, so make a GeoJSON Feature.
            geometry = LastUpdatedOrderedDict()
            geometry['id'] = '#geo-rec-geom-' + str(i) + '-of-' + str(self.total_found)
            geometry['type'] = 'Point'
            geometry['coordinates'] = [rec_props_obj.longitude, rec_props_obj.latitude]
            record['type'] = 'Feature'
            record['category'] = 'oc-api:geo-record'
            record['geometry'] = geometry
        else:
            # Flag used below to route the record to non_geo_recs.
            geometry = False
        if rec_props_obj.early_date is not False \
                and rec_props_obj.late_date is not False:
            # The record has dates, so describe its 'when'.
            when = LastUpdatedOrderedDict()
            when['id'] = '#event-rec-when-' + str(i) + '-of-' + str(self.total_found)
            when['type'] = 'oc-gen:formation-use-life'
            # convert numeric to GeoJSON-LD ISO 8601
            when['start'] = ISOyears().make_iso_from_float(rec_props_obj.early_date)
            when['stop'] = ISOyears().make_iso_from_float(rec_props_obj.late_date)
            record['when'] = when
        # start adding GeoJSON properties
        properties = LastUpdatedOrderedDict()
        properties['id'] = '#rec-' + str(i) + '-of-' + str(self.total_found)
        properties['feature-type'] = 'item record'
        properties['uri'] = rec_props_obj.uri
        properties['href'] = rec_props_obj.href
        properties['citation uri'] = rec_props_obj.cite_uri
        properties['label'] = rec_props_obj.label
        properties['project label'] = rec_props_obj.project_label
        properties['project href'] = rec_props_obj.project_href
        properties['context label'] = rec_props_obj.context_label
        properties['context href'] = rec_props_obj.context_href
        properties['early bce/ce'] = rec_props_obj.early_date
        properties['late bce/ce'] = rec_props_obj.late_date
        properties['item category'] = rec_props_obj.category
        if rec_props_obj.snippet is not False:
            properties['snippet'] = rec_props_obj.snippet
        properties['thumbnail'] = rec_props_obj.thumbnail_scr
        properties['published'] = rec_props_obj.published
        properties['updated'] = rec_props_obj.updated
        if isinstance(rec_props_obj.other_attributes, list):
            for attribute in rec_props_obj.other_attributes:
                prop_key = attribute['property']
                # Avoid clobbering a standard property key.
                prop_key = rec_props_obj.prevent_attribute_key_collision(properties, prop_key)
                if self.flatten_rec_attributes:
                    properties[prop_key] = attribute['value']
                else:
                    properties[prop_key] = attribute['values_list']
        record['properties'] = properties
        if geometry is not False:
            # add to list of geospatial records
            self.geojson_recs.append(record)
        else:
            # case when the record is not GeoSpatial in nature
            item = SolrUUIDs().make_item_dict_from_rec_props_obj(rec_props_obj, False)
            self.non_geo_recs.append(item)
def process_solr_recs(self, solr_recs):
    """Processes solr document records into GeoJSON Feature records.

    First looks up related data (complex geometries, media thumbnails
    and files, string attributes) in bulk for all records, then builds
    one record per solr document. Records with coordinates are appended
    to self.geojson_recs; others are routed to self.non_geo_recs.
    """
    # check database for complex geo objects for all of these records
    db_geo = self.get_recs_complex_geo_features(solr_recs)
    if self.get_all_media:
        # Full media file lookups supersede thumbnail-only lookups.
        self.do_media_thumbs = False
    thumbnail_data = self.get_media_thumbs(solr_recs)
    media_file_data = self.get_all_media_files(solr_recs)
    string_attrib_data = self.get_string_rec_attributes(solr_recs)
    # i keeps a running record number across paged requests.
    i = self.rec_start
    for solr_rec in solr_recs:
        i += 1
        record = LastUpdatedOrderedDict()
        rec_props_obj = RecordProperties(self.response_dict_json)
        rec_props_obj.min_date = self.min_date
        rec_props_obj.max_date = self.max_date
        rec_props_obj.highlighting = self.highlighting
        rec_props_obj.flatten_rec_attributes = self.flatten_rec_attributes
        rec_props_obj.rec_attributes = self.rec_attributes
        rec_props_obj.thumbnail_data = thumbnail_data
        rec_props_obj.media_file_data = media_file_data
        rec_props_obj.string_attrib_data = string_attrib_data
        rec_props_obj.parse_solr_record(solr_rec)
        record['id'] = '#record-' + str(i) + '-of-' + str(self.total_found)
        if rec_props_obj.label is False:
            # No label parsed; fall back to a positional label.
            record['label'] = 'Record ' + str(i) + ' of ' + str(
                self.total_found)
        else:
            record['label'] = rec_props_obj.label
        if rec_props_obj.uri is not False:
            record['rdfs:isDefinedBy'] = rec_props_obj.uri
        if rec_props_obj.latitude is not False \
                and rec_props_obj.longitude is not False:
            # check to see if there are complex geo objects for this item
            geometry = self.get_item_complex_geo_feature(
                i, solr_rec['uuid'], db_geo)
            if geometry is False:
                # No complex geometry; fall back to a simple point.
                geometry = LastUpdatedOrderedDict()
                geometry['id'] = '#geo-rec-geom-' + str(i) + '-of-' + str(
                    self.total_found)
                geometry['type'] = 'Point'
                geometry['coordinates'] = [
                    rec_props_obj.longitude,
                    rec_props_obj.latitude
                ]
            record['type'] = 'Feature'
            record['category'] = 'oc-api:geo-record'
            record['geometry'] = geometry
        else:
            # Flag used below to route the record to non_geo_recs.
            geometry = False
        if rec_props_obj.early_date is not False \
                and rec_props_obj.late_date is not False:
            # The record has dates, so describe its 'when'.
            when = LastUpdatedOrderedDict()
            when['id'] = '#event-rec-when-' + str(i) + '-of-' + str(
                self.total_found)
            when['type'] = 'oc-gen:formation-use-life'
            # convert numeric to GeoJSON-LD ISO 8601
            when['start'] = ISOyears().make_iso_from_float(
                rec_props_obj.early_date)
            when['stop'] = ISOyears().make_iso_from_float(
                rec_props_obj.late_date)
            record['when'] = when
        # start adding GeoJSON properties
        properties = LastUpdatedOrderedDict()
        properties['id'] = '#rec-' + str(i) + '-of-' + str(
            self.total_found)
        properties['feature-type'] = 'item record'
        properties['uri'] = rec_props_obj.uri
        properties['href'] = rec_props_obj.href
        properties['citation uri'] = rec_props_obj.cite_uri
        properties['label'] = rec_props_obj.label
        properties['project label'] = rec_props_obj.project_label
        properties['project href'] = rec_props_obj.project_href
        properties['context label'] = rec_props_obj.context_label
        properties['context href'] = rec_props_obj.context_href
        properties['early bce/ce'] = rec_props_obj.early_date
        properties['late bce/ce'] = rec_props_obj.late_date
        properties['item category'] = rec_props_obj.category
        if rec_props_obj.human_remains_flagged:
            # the record is flagged to relate to human remains
            properties[
                'human remains flagged'] = rec_props_obj.human_remains_flagged
        if rec_props_obj.snippet is not False:
            properties['snippet'] = rec_props_obj.snippet
        properties['thumbnail'] = rec_props_obj.thumbnail_scr
        if rec_props_obj.preview_scr is not False:
            properties['preview'] = rec_props_obj.preview_scr
        if rec_props_obj.fullfile_scr is not False:
            properties['primary-file'] = rec_props_obj.fullfile_scr
        properties['published'] = rec_props_obj.published
        properties['updated'] = rec_props_obj.updated
        if isinstance(rec_props_obj.other_attributes, list):
            for attribute in rec_props_obj.other_attributes:
                prop_key = attribute['property']
                # Avoid clobbering a standard property key.
                prop_key = rec_props_obj.prevent_attribute_key_collision(
                    properties, prop_key)
                if self.flatten_rec_attributes:
                    properties[prop_key] = attribute['value']
                else:
                    properties[prop_key] = attribute['values_list']
        record['properties'] = properties
        if geometry is not False:
            # add to list of geospatial records
            self.geojson_recs.append(record)
        else:
            # case when the record is not GeoSpatial in nature
            item = SolrUUIDs().make_item_dict_from_rec_props_obj(
                rec_props_obj, False)
            self.non_geo_recs.append(item)
def make_chronology_facet_options(self, solr_json):
    """Makes chronology facet options from a solr_json response.

    :param solr_json: raw solr response dict
    :return: a list of chronology facet option dicts, or None when
        the response has no usable chronological tile facets.
    """
    chrono_path_keys = (
        configs.FACETS_SOLR_ROOT_PATH_KEYS
        + ['form_use_life_chrono_tile']
    )
    chrono_val_count_list = utilities.get_dict_path_value(
        chrono_path_keys,
        solr_json,
        default=[]
    )
    if not len(chrono_val_count_list):
        return None
    options_tuples = utilities.get_facet_value_count_tuples(
        chrono_val_count_list
    )
    if not len(options_tuples):
        return None
    # Check to see if the client included any request parameters
    # that limited the chronological range of the request.
    self._set_client_earliest_latest_limits()
    valid_tile_dicts = self._make_valid_options_tile_dicts(
        options_tuples
    )
    if not len(valid_tile_dicts):
        # None of the chronological tiles are valid
        # given the query requirements.
        return None
    # Determine the aggregation depth needed to group chronological
    # tiles together into a reasonable number of options.
    self._get_tile_aggregation_depth(valid_tile_dicts)
    aggregate_tiles = {}
    for tile_dict in valid_tile_dicts:
        # Now aggregate the tiles.
        trim_tile_key = tile_dict['tile_key'][:self.default_aggregation_depth]
        if trim_tile_key not in aggregate_tiles:
            # Make the aggregate tile dictionary
            # object.
            chrono_t = ChronoTile()
            agg_dict = chrono_t.decode_path_dates(trim_tile_key)
            if (self.min_date is not None
                    and agg_dict['earliest_bce'] < self.min_date):
                # The aggregated date range looks too early, so
                # set it to the earliest allowed.
                agg_dict['earliest_bce'] = self.min_date
            if (self.max_date is not None
                    and agg_dict['latest_bce'] > self.max_date):
                # The aggregated date range looks too late, so
                # set it to the latest date range allowed.
                agg_dict['latest_bce'] = self.max_date
            agg_dict['tile_key'] = trim_tile_key
            agg_dict['count'] = 0
            aggregate_tiles[trim_tile_key] = agg_dict
        aggregate_tiles[trim_tile_key]['count'] += tile_dict['count']
    agg_tile_list = [tile_dict for _, tile_dict in aggregate_tiles.items()]
    # Now sort by earliest bce, then reversed latest bce.
    # This puts early dates with the longest timespans first.
    sorted_agg_tiles = sorted(
        agg_tile_list,
        key=lambda k: (k['earliest_bce'], -k['latest_bce'])
    )
    options = []
    for tile_dict in sorted_agg_tiles:
        sl = SearchLinks(
            request_dict=copy.deepcopy(self.request_dict),
            base_search_url=self.base_search_url
        )
        # Remove non search related params.
        sl.remove_non_query_params()
        # Update the request dict for this facet option.
        sl.replace_param_value(
            'form-chronotile',
            match_old_value=None,
            new_value=tile_dict['tile_key'],
        )
        sl.replace_param_value(
            'form-start',
            match_old_value=None,
            new_value=tile_dict['earliest_bce'],
        )
        sl.replace_param_value(
            'form-stop',
            match_old_value=None,
            new_value=tile_dict['latest_bce'],
        )
        urls = sl.make_urls_from_request_dict()
        if urls['html'] == self.current_filters_url:
            # The new URL matches our current filter
            # url, so don't add this facet option.
            continue
        option = LastUpdatedOrderedDict()
        option['id'] = urls['html']
        option['json'] = urls['json']
        option['count'] = tile_dict['count']
        option['category'] = 'oc-api:chrono-facet'
        # Convert numeric years to GeoJSON-LD ISO 8601.
        option['start'] = ISOyears().make_iso_from_float(
            tile_dict['earliest_bce']
        )
        option['stop'] = ISOyears().make_iso_from_float(
            tile_dict['latest_bce']
        )
        properties = LastUpdatedOrderedDict()
        properties['early bce/ce'] = tile_dict['earliest_bce']
        properties['late bce/ce'] = tile_dict['latest_bce']
        option['properties'] = properties
        options.append(option)
    return options