def print_standard_type(self) -> str:
    from openatlas.models.node import Node
    if not self.class_.standard_type:
        return ''
    root_id = Node.get_hierarchy(self.class_.standard_type).id
    for node in self.nodes:
        if node.root and node.root[-1] == root_id:
            return link(node)
    return ''

def display_file_api(filename: str) -> Any:  # pragma: no cover
    parser = image_parser.parse_args()
    from pathlib import Path as Pathlib_path
    entity = Entity.get_by_id(int(Pathlib_path(filename).stem), nodes=True)
    license_ = None
    for node in entity.nodes:
        if node.root and node.root[-1] == Node.get_hierarchy('License').id:
            license_ = node.name
    if not license_:
        raise AccessDeniedError
    if parser['download']:
        return send_file(
            str(app.config['UPLOAD_DIR']) + '/' + filename,
            as_attachment=True)
    return send_from_directory(app.config['UPLOAD_DIR'], filename)

def get(filename: str) -> Response:
    from pathlib import Path as Pathlib_path
    entity = Entity.get_by_id(int(Pathlib_path(filename).stem), nodes=True)
    license_ = None
    for node in entity.nodes:
        if node.root and node.root[-1] == Node.get_hierarchy('License').id:
            license_ = node.name
    if not license_:
        raise AccessDeniedError
    parser = image.parse_args()
    if parser['download']:
        return send_file(
            f"{app.config['UPLOAD_DIR']}/{filename}",
            as_attachment=True)
    if parser['image_size']:
        size = app.config['IMAGE_SIZE'][parser['image_size']]
        return send_from_directory(
            f"{app.config['RESIZED_IMAGES']}/{size}", filename)
    return send_from_directory(app.config['UPLOAD_DIR'], filename)

def test_duplicates(self) -> None:
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            event = Entity.insert('acquisition', 'Event Horizon')
            source = Entity.insert('source', 'Tha source')
            source.link('P67', event)
            source.link('P67', event)
            source_node = Node.get_hierarchy('Source')
            source.link('P2', g.nodes[source_node.subs[0]])
            source.link('P2', g.nodes[source_node.subs[1]])
        rv = self.app.get(url_for('admin_check_link_duplicates'))
        assert b'Event Horizon' in rv.data
        rv = self.app.get(
            url_for('admin_check_link_duplicates', delete='delete'),
            follow_redirects=True)
        assert b'Remove' in rv.data
        rv = self.app.get(
            url_for(
                'admin_delete_single_type_duplicate',
                entity_id=source.id,
                node_id=source_node.subs[0]),
            follow_redirects=True)
        assert b'Congratulations, everything looks fine!' in rv.data

def add_reference_systems(form: Any, form_name: str) -> None:
    precision_nodes = Node.get_hierarchy('External reference match').subs
    precisions = [('', '')] + [
        (str(g.nodes[id_].id), g.nodes[id_].name) for id_ in precision_nodes]
    systems = list(g.reference_systems.values())
    systems.sort(key=lambda x: x.name.casefold())
    for system in systems:
        if form_name not in [
                form_['name'] for form_ in system.get_forms().values()]:
            continue
        setattr(
            form,
            f'reference_system_id_{system.id}',
            StringField(
                uc_first(system.name),
                [OptionalValidator()],
                description=system.description,
                render_kw={
                    'autocomplete': 'off',
                    'placeholder': system.placeholder}))
        setattr(
            form,
            f'reference_system_precision_{system.id}',
            SelectField(
                _('precision'),
                choices=precisions,
                default=system.precision_default_id))

def add_reference_systems(form: Any, form_name: str) -> None:
    precisions = [('', '')]
    for id_ in Node.get_hierarchy('External reference match').subs:
        precisions.append((str(g.nodes[id_].id), g.nodes[id_].name))
    for system in g.reference_systems.values():
        if form_name not in [
                form_['name'] for form_ in system.get_forms().values()]:
            continue
        setattr(
            form,
            'reference_system_id_{id}'.format(id=system.id),
            StringField(
                system.name,
                validators=[OptionalValidator()],
                description=system.description,
                render_kw={
                    'autocomplete': 'off',
                    'placeholder': system.placeholder}))
        setattr(
            form,
            'reference_system_precision_{id}'.format(id=system.id),
            SelectField(
                _('precision'),
                choices=precisions,
                default=system.precision_default_id))

def test_api(self) -> None:
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            place = insert_entity(
                'Nostromos', 'place', description='That is the Nostromos')
            if not place:  # Needed for Mypy
                return  # pragma: no cover

            # Adding Dates to place
            place.begin_from = '2018-01-31'
            place.begin_to = '2018-03-01'
            place.begin_comment = 'Begin of the Nostromos'
            place.end_from = '2019-01-31'
            place.end_to = '2019-03-01'
            place.end_comment = 'Destruction of the Nostromos'
            place.update()
            location = place.get_linked_entity_safe('P53')
            Gis.add_example_geom(location)

            # Adding Type Settlement
            place.link('P2', Node.get_hierarchy('Place'))

            # Adding Alias
            alias = insert_entity('Cargo hauler', 'appellation')
            place.link('P1', alias)

            # Adding External Reference
            external_reference = insert_entity(
                'https://openatlas.eu', 'external_reference')
            external_reference.link(
                'P67', place, description='OpenAtlas Website')

            # Adding feature to place
            feature = insert_entity('Feature', 'feature', place)

            # Adding stratigraphic to place
            insert_entity('Strato', 'stratigraphic_unit', feature)

            # Adding Administrative Unit Node
            unit_node = Node.get_hierarchy('Administrative unit')

            # Adding File to place
            file = insert_entity('Datei', 'file')
            file.link('P67', place)
            file.link('P2', g.nodes[Node.get_hierarchy('License').subs[0]])

            # Adding Value Type
            value_type = Node.get_hierarchy('Dimensions')
            place.link('P2', Entity.get_by_id(value_type.subs[0]), '23.0')

            # Adding Geonames
            geonames = Entity.get_by_id(
                ReferenceSystem.get_by_name('GeoNames').id)
            precision_id = Node.get_hierarchy(
                'External reference match').subs[0]
            geonames.link(
                'P67', place, description='2761369', type_id=precision_id)

        # Test LinkedPlaces output
        self.maxDiff = None
        rv = self.app.get(url_for('api.entity', id_=place.id))
        self.assertDictEqual(
            rv.get_json(), api_data.api_linked_place_template)

        # Test Geojson output
        rv = self.app.get(
            url_for('api.entity', id_=place.id, format='geojson'))
        self.assertDictEqual(rv.get_json(), api_data.api_geojson_template)

        # ---Content---
        # /api/0.2/classes/
        rv = self.app.get(url_for('api.class_mapping'))
        self.assertAlmostEqual(rv.get_json(), ClassMapping.mapping)

        # /api/0.2/content/
        rv = self.app.get(url_for('api.content', lang='de'))
        self.assertDictEqual(rv.get_json(), api_data.api_content_de)
        rv = self.app.get(url_for('api.content', lang='en', download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_content_en)

        # /api/0.2/geometric_entities/
        rv = self.app.get(url_for('api.geometric_entities'))
        self.assertDictEqual(rv.get_json(), api_data.api_geometries_template)
        rv = self.app.get(url_for('api.geometric_entities', download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_geometries_template)
        rv = self.app.get(url_for('api.geometric_entities', count=True))
        assert b'1' in rv.data
        rv = self.app.get(
            url_for(
                'api.geometric_entities', geometry='gisLineAll', count=True))
        assert b'0' in rv.data

        # /api/0.2/overview_count/
        rv = self.app.get(url_for('api.overview_count'))
        self.assertAlmostEqual(rv.get_json(), api_data.api_overview_count)

        # /api/0.2/system_class_count/
        rv = self.app.get(url_for('api.system_class_count'))
        self.assertDictEqual(rv.get_json(), api_data.api_system_class_count)

        # ---Nodes---
        # /api/0.2/node_entities/
        rv = self.app.get(url_for('api.node_entities', id_=unit_node.id))
        self.assertDictEqual(rv.get_json(), api_data.api_node_entities)
        rv = self.app.get(
            url_for('api.node_entities', id_=unit_node.id, download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_node_entities)
        rv = self.app.get(
            url_for('api.node_entities', id_=unit_node.id, count=True))
        assert b'6' in rv.data

        # /api/0.2/node_entities_all/
        rv = self.app.get(
            url_for('api.node_entities_all', id_=unit_node.id))
        self.assertDictEqual(rv.get_json(), api_data.api_node_entities_all)
        rv = self.app.get(
            url_for('api.node_entities_all', id_=unit_node.id, download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_node_entities_all)
        rv = self.app.get(
            url_for('api.node_entities_all', id_=unit_node.id, count=True))
        assert b'8' in rv.data

        # # /api/0.2/node_overview/
        # rv = self.app.get(url_for('api.node_overview'))
        # self.assertDictEqual(rv.get_json(), api_data.api_node_overview)
        # rv = self.app.get(url_for('api.node_overview', download=True))
        # self.assertDictEqual(rv.get_json(), api_data.api_node_overview)

        # /api/0.2/subunit/
        rv = self.app.get(url_for('api.subunit', id_=place.id))
        self.assertDictEqual(rv.get_json(), api_data.api_subunit)
        rv = self.app.get(
            url_for('api.subunit', id_=place.id, download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_subunit)
        rv = self.app.get(url_for('api.subunit', id_=place.id, count=True))
        assert b'1' in rv.data

        # /api/0.2/subunit_hierarchy/
        rv = self.app.get(url_for('api.subunit_hierarchy', id_=place.id))
        self.assertDictEqual(rv.get_json(), api_data.api_subunit_hierarchy)
        rv = self.app.get(
            url_for('api.subunit_hierarchy', id_=place.id, download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_subunit_hierarchy)
        rv = self.app.get(
            url_for('api.subunit_hierarchy', id_=place.id, count=True))
        assert b'2' in rv.data

        # /api/0.2/type_tree/
        # rv = self.app.get(url_for('api.type_tree'))
        # self.assertDictEqual(rv.get_json(), api_data.api_type_tree)
        # rv = self.app.get(url_for('api.type_tree', download=True))
        # self.assertDictEqual(rv.get_json(), api_data.api_type_tree)

        # ---Entity---
        # /api/0.2/code/
        rv = self.app.get(url_for('api.code', code='reference'))
        self.assertDictEqual(rv.get_json(), api_data.api_code_reference)
        rv = self.app.get(
            url_for('api.code', code='reference', format='geojson'))
        self.assertDictEqual(
            rv.get_json(), api_data.api_code_reference_geojson)
        rv = self.app.get(
            url_for('api.code', code='reference', download=True))
        self.assertDictEqual(rv.get_json(), api_data.api_code_reference)
        rv = self.app.get(url_for('api.code', code='place', count=True))
        assert b'3' in rv.data
        rv = self.app.get(
            url_for('api.code', code='place', show='geometry', limit=2,
                    sort='desc', first=feature.id))
        self.assertDictEqual(
            rv.get_json(), api_data.api_code_place_first_sort_show_limit)
        rv = self.app.get(
            url_for('api.code', code='place', limit=10, sort='desc',
                    column='name', filter='or|name|like|Nostromos'))
        self.assertDictEqual(
            rv.get_json(), api_data.api_code_place_limit_sort_column_filter)
        rv = self.app.get(
            url_for('api.code', code='place',
                    filter='or|id|eq|' + str(place.id)))
        self.assertDictEqual(rv.get_json(), api_data.api_code_place_filter_id)
        rv = self.app.get(
            url_for('api.code', code='place',
                    filter='or|begin_from|ge|2018-1-1'))
        self.assertDictEqual(
            rv.get_json(), api_data.api_code_place_filter_time)
        rv = self.app.get(
            url_for('api.code', code='reference', export='csv'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('api.entities_linked_to_entity', id_=place.id))
        self.assertDictEqual(
            rv.get_json(), api_data.api_entities_linked_entity)

        # Path Tests
        rv = self.app.get(url_for('api.class', class_code='E31'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('api.class', class_code='E31', format='geojson'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('api.class', class_code='E31', download=True))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('api.class', class_code='E18', export='csv'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('api.latest', latest=10))
        assert b'Datei' in rv.data
        rv = self.app.get(url_for('api.latest', count=True, latest=2))
        assert b'2' in rv.data
        rv = self.app.get(
            url_for('api.system_class', system_class='appellation'))
        assert b'Cargo hauler' in rv.data
        rv = self.app.get(
            url_for('api.system_class', system_class='appellation',
                    format='geojson'))
        assert b'Cargo hauler' in rv.data
        rv = self.app.get(url_for('api.type_entities', id_=unit_node.id))
        assert b'Austria' in rv.data
        rv = self.app.get(
            url_for('api.type_entities_all', id_=unit_node.id))
        assert b'Austria' in rv.data
        rv = self.app.get(
            url_for('api.query', entities=place.id, classes='E18',
                    items='place'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('api.query', entities=place.id, classes='E18',
                    items='place', format='geojson'))
        assert b'Nostromos' in rv.data

        # Path test with download
        rv = self.app.get(
            url_for('api.entity', id_=place.id, download=True))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('api.latest', latest=1, download=True))
        assert b'Datei' in rv.data
        rv = self.app.get(
            url_for('api.system_class', system_class='appellation',
                    download=True))
        assert b'Cargo hauler' in rv.data
        rv = self.app.get(
            url_for('api.query', classes='E31', download=True))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(url_for('api.overview_count', download=True))
        assert b'systemClass' in rv.data
        rv = self.app.get(url_for('api.class_mapping', download=True))
        assert b'systemClass' in rv.data

        # Path with export
        rv = self.app.get(url_for('api.entity', id_=place.id, export='csv'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('api.system_class', system_class='place', export='csv'))
        assert b'Nostromos' in rv.data

        # Testing Subunit
        # Parameter: filter
        rv = self.app.get(
            url_for('api.class', class_code='E18',
                    filter='or|name|like|Nostr'))
        assert b'Nostromos' in rv.data

        # Parameter: last
        rv = self.app.get(
            url_for('api.class', class_code='E18', last=place.id))
        assert b'entities' in rv.data

        # Parameter: first
        rv = self.app.get(
            url_for('api.class', class_code='E18', first=place.id))
        assert b'entities' in rv.data

        # Parameter: show
        rv = self.app.get(
            url_for('api.class', class_code='E31', show='types'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('api.class', class_code='E18', show='when'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('api.class', class_code='E31', show='none'))
        assert b'https://openatlas.eu' in rv.data

        # Parameter: count
        rv = self.app.get(
            url_for('api.class', class_code='E31', count=True))
        assert b'2' in rv.data
        rv = self.app.get(
            url_for('api.system_class', system_class='appellation',
                    count=True))
        assert b'1' in rv.data
        rv = self.app.get(
            url_for('api.query', entities=place.id, classes='E18',
                    codes='place'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('api.query', entities=place.id, classes='E18',
                    codes='place', count=True))
        assert b'7' in rv.data

def test_actor(self) -> None:
    with app.app_context():  # type: ignore
        rv = self.app.get(url_for('index', view='actor'))
        assert b'No entries' in rv.data
        rv = self.app.post(
            url_for('insert', class_='place'),
            data={
                'name': 'Captain Miller',
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        residence_id = rv.location.split('/')[-1]
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            sex_node = Node.get_hierarchy('Sex')
            sex_node_sub_1 = g.nodes[sex_node.subs[0]]
            sex_node_sub_2 = g.nodes[sex_node.subs[1]]
            event = Entity.insert('acquisition', 'Event Horizon')
            source = Entity.insert('source', 'Necronomicon')

        # Actor insert
        rv = self.app.get(url_for('insert', class_='person'))
        assert b'+ Person' in rv.data
        self.app.get(
            url_for('insert', class_='person', origin_id=residence_id))
        data = {
            sex_node.id: sex_node_sub_1.id,
            'name': 'Sigourney Weaver',
            'alias-1': 'Ripley',
            'residence': residence_id,
            'begins_in': residence_id,
            'ends_in': residence_id,
            'description': 'Susan Alexandra Weaver is an American actress.',
            'begin_year_from': '-1949',
            'begin_month_from': '10',
            'begin_day_from': '8',
            'begin_year_to': '-1948',
            'end_year_from': '2049',
            'end_year_to': '2050',
            self.precision_geonames: '',
            self.precision_wikidata: ''}
        rv = self.app.post(url_for('insert', class_='person'), data=data)
        actor_id = rv.location.split('/')[-1]
        self.app.post(url_for('insert', class_='group'), data=data)
        rv = self.app.post(
            url_for('insert', class_='person', origin_id=residence_id),
            data=data,
            follow_redirects=True)
        assert b'An entry has been created' in rv.data

        # Test actor nodes
        rv = self.app.get(url_for('entity_view', id_=sex_node_sub_1.id))
        assert b'Susan' in rv.data
        rv = self.app.get(
            url_for('node_move_entities', id_=sex_node_sub_1.id))
        assert b'Sigourney' in rv.data
        rv = self.app.post(
            url_for('node_move_entities', id_=sex_node_sub_1.id),
            follow_redirects=True,
            data={
                sex_node.id: sex_node_sub_2.id,
                'selection': [actor_id],
                'checkbox_values': str([actor_id])})
        assert b'Entities were updated' in rv.data
        rv = self.app.post(
            url_for('node_move_entities', id_=sex_node_sub_2.id),
            follow_redirects=True,
            data={
                sex_node.id: '',
                'selection': [actor_id],
                'checkbox_values': str([actor_id])})
        assert b'Entities were updated' in rv.data
        self.app.post(
            url_for('insert', class_='person', origin_id=actor_id),
            data=data)
        self.app.post(
            url_for('insert', class_='person', origin_id=event.id),
            data=data)
        self.app.post(
            url_for('insert', class_='person', origin_id=source.id),
            data=data)
        rv = self.app.post(
            url_for('insert', class_='external_reference'),
            data={'name': 'https://openatlas.eu'})
        reference_id = rv.location.split('/')[-1]
        rv = self.app.post(
            url_for('insert', class_='person', origin_id=reference_id),
            data=data,
            follow_redirects=True)
        assert b'An entry has been created' in rv.data
        data['continue_'] = 'yes'
        rv = self.app.post(
            url_for('insert', class_='person'),
            data=data,
            follow_redirects=True)
        assert b'An entry has been created' in rv.data
        rv = self.app.get(url_for('index', view='actor'))
        assert b'Sigourney Weaver' in rv.data

        # Add to actor
        rv = self.app.get(url_for('entity_add_source', id_=actor_id))
        assert b'Link source' in rv.data
        rv = self.app.post(
            url_for('entity_add_source', id_=actor_id),
            data={'checkbox_values': str([source.id])},
            follow_redirects=True)
        assert b'Necronomicon' in rv.data
        rv = self.app.get(url_for('entity_add_reference', id_=actor_id))
        assert b'Link reference' in rv.data
        rv = self.app.post(
            url_for('entity_add_reference', id_=actor_id),
            data={'reference': reference_id, 'page': '777'},
            follow_redirects=True)
        assert b'777' in rv.data

        # Actor update
        rv = self.app.get(url_for('update', id_=actor_id))
        assert b'American actress' in rv.data
        data['name'] = 'Susan Alexandra Weaver'
        data['alias-1'] = 'Ripley1'
        data['end_year_from'] = ''
        data['end_year_to'] = ''
        data['begin_year_to'] = '1950'
        data['begin_day_from'] = ''
        rv = self.app.post(
            url_for('update', id_=actor_id), data=data, follow_redirects=True)
        assert b'Changes have been saved' in rv.data
        rv = self.app.post(
            url_for('ajax_bookmark'),
            data={'entity_id': actor_id},
            follow_redirects=True)
        assert b'Remove bookmark' in rv.data
        rv = self.app.get('/')
        assert b'Weaver' in rv.data
        rv = self.app.post(
            url_for('ajax_bookmark'),
            data={'entity_id': actor_id},
            follow_redirects=True)
        assert b'Bookmark' in rv.data
        rv = self.app.get(
            url_for('link_delete', origin_id=actor_id, id_=666),
            follow_redirects=True)
        assert b'removed' in rv.data

        # Actor delete
        rv = self.app.get(
            url_for('index', view='actor', delete_id=actor_id))
        assert b'The entry has been deleted.' in rv.data

def test_event(self) -> None:
    with app.app_context():  # type: ignore
        # Create entities for file
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor = Entity.insert('person', 'File keeper')
            reference = Entity.insert('edition', 'Ancient Books')
            node_id = Node.get_hierarchy('Sex').subs[0]

        # Insert
        rv = self.app.get(
            url_for('insert', class_='file', origin_id=actor.id))
        assert b'+ File' in rv.data
        logo = \
            pathlib.Path(app.root_path) \
            / 'static' / 'images' / 'layout' / 'logo.png'
        with open(logo, 'rb') as img:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=actor.id),
                data={'name': 'OpenAtlas logo', 'file': img},
                follow_redirects=True)
        assert b'An entry has been created' in rv.data
        with open(logo, 'rb') as img1, open(logo, 'rb') as img2:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=actor.id),
                data={'name': 'OpenAtlas logo', 'file': [img1, img2]},
                follow_redirects=True)
        assert b'An entry has been created' in rv.data
        with open(logo, 'rb') as img:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=reference.id),
                data={'name': 'OpenAtlas logo', 'file': img},
                follow_redirects=True)
        assert b'An entry has been created' in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            files = Entity.get_by_class('file')
            file_id = files[0].id
            file_id2 = files[1].id

        # Logo
        rv = self.app.get(
            url_for('admin_logo'),
            data={'file': file_id},
            follow_redirects=True)
        assert b'OpenAtlas logo' in rv.data
        with self.app.get(
                url_for('display_logo', filename=str(file_id) + '.png')):
            pass  # Test logo with "with" to prevent unclosed files warning
        rv = self.app.get(
            url_for('admin_logo', id_=file_id), follow_redirects=True)
        assert b'Remove custom logo' in rv.data
        rv = self.app.get(
            url_for('admin_index', action="remove_logo", id_=0),
            follow_redirects=True)
        assert b'Logo' in rv.data
        with open(
                pathlib.Path(app.root_path) / 'views' / 'index.py', 'rb') \
                as invalid_file:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=actor.id),
                data={'name': 'Invalid file', 'file': invalid_file},
                follow_redirects=True)
        assert b'File type not allowed' in rv.data
        rv = self.app.post(
            url_for('insert', class_='file', origin_id=actor.id),
            follow_redirects=True,
            data={'name': 'This is not a file'})
        assert b'This field is required' in rv.data

        # View
        rv = self.app.get(url_for('entity_view', id_=file_id))
        assert b'OpenAtlas logo' in rv.data
        rv = self.app.get(url_for('entity_view', id_=file_id2))
        assert b'OpenAtlas logo' in rv.data
        with self.app.get(
                url_for('download_file', filename=str(file_id) + '.png')):
            pass  # Calling with "with" to prevent unclosed files warning
        with self.app.get(
                url_for('display_file', filename=str(file_id) + '.png')):
            pass  # Calling with "with" to prevent unclosed files warning

        # Index
        rv = self.app.get(url_for('index', view='file'))
        assert b'OpenAtlas logo' in rv.data

        # Set and unset as main image
        self.app.get(
            url_for('set_profile_image', id_=file_id, origin_id=actor.id),
            follow_redirects=True)
        self.app.get(
            url_for('file_remove_profile_image', entity_id=actor.id))

        # Add to reference
        rv = self.app.get(
            url_for('reference_add', id_=reference.id, view='file'))
        assert b'OpenAtlas logo' in rv.data
        rv = self.app.post(
            url_for('reference_add', id_=reference.id, view='file'),
            data={'file': file_id, 'page': '777'},
            follow_redirects=True)
        assert b'777' in rv.data

        # Update
        rv = self.app.get(url_for('update', id_=file_id))
        assert b'OpenAtlas logo' in rv.data
        rv = self.app.post(
            url_for('update', id_=file_id),
            data={'name': 'Updated file'},
            follow_redirects=True)
        assert b'Changes have been saved' in rv.data \
            and b'Updated file' in rv.data
        rv = self.app.get(url_for('file_add', id_=file_id, view='actor'))
        assert b'Link actor' in rv.data
        rv = self.app.post(
            url_for('file_add', id_=file_id, view='actor'),
            data={'checkbox_values': [actor.id]},
            follow_redirects=True)
        assert b'File keeper' in rv.data
        rv = self.app.post(
            url_for('entity_add_file', id_=node_id),
            data={'checkbox_values': str([file_id])},
            follow_redirects=True)
        assert b'Updated file' in rv.data

        # Delete
        for file in files:
            rv = self.app.get(
                url_for('index', view='file', delete_id=file.id))
            assert b'The entry has been deleted' in rv.data

def test_reference_system(self) -> None:
    with app.app_context():  # type: ignore
        rv = self.app.get(url_for('index', view='reference_system'))
        assert b'GeoNames' in rv.data
        geonames = ReferenceSystem.get_by_name('GeoNames')
        wikidata = ReferenceSystem.get_by_name('Wikidata')
        precision_id = Node.get_hierarchy('External reference match').subs[0]
        rv = self.app.get(url_for('insert', class_='reference_system'))
        assert b'Resolver URL' in rv.data
        data = {
            'name': 'Wikipedia',
            'website_url': 'https://wikipedia.org',
            'resolver_url': 'https://wikipedia.org',
            'forms': [geonames.forms[0]]}
        rv = self.app.post(
            url_for('insert', class_='reference_system'),
            follow_redirects=True,
            data=data)
        assert b'An entry has been created.' in rv.data
        wikipedia_id = ReferenceSystem.get_by_name('Wikipedia').id
        rv = self.app.get(
            url_for('index', view='reference_system', delete_id=wikipedia_id),
            follow_redirects=True)
        assert b'Deletion not possible if forms are attached' in rv.data
        rv = self.app.get(
            url_for(
                'reference_system_remove_form',
                system_id=wikipedia_id,
                form_id=geonames.forms[0]),
            follow_redirects=True)
        assert b'Changes have been saved' in rv.data
        rv = self.app.get(
            url_for('index', view='reference_system', delete_id=wikipedia_id))
        assert b'The entry has been deleted' in rv.data
        rv = self.app.post(url_for('update', id_=geonames.id))
        assert b'Website URL' in rv.data
        data = {
            'name': 'GeoNames',
            Node.get_hierarchy('External reference match').id: precision_id,
            'website_url': 'https://www.geonames2.org/',
            'resolver_url': 'https://www.geonames2.org/'}
        rv = self.app.post(
            url_for('update', id_=geonames.id),
            follow_redirects=True,
            data=data)
        assert b'Changes have been saved.' in rv.data
        rv = self.app.post(
            url_for('update', id_=geonames.id),
            follow_redirects=True,
            data=data)
        assert b'https://www.geonames2.org/' in rv.data
        rv = self.app.post(
            url_for('insert', class_='person'),
            data={
                'name': 'Actor test',
                'reference_system_id_' + str(wikidata.id): 'Q123',
                self.precision_geonames: '',
                self.precision_wikidata: precision_id})
        person_id = rv.location.split('/')[-1]
        rv = self.app.get(
            url_for('entity_view', id_=wikidata.id), follow_redirects=True)
        assert b'Actor test' in rv.data
        rv = self.app.get(
            url_for('entity_view', id_=person_id), follow_redirects=True)
        assert b'Wikidata' in rv.data
        rv = self.app.get(url_for('update', id_=person_id))
        assert b'Q123' in rv.data

        # Testing errors
        rv = self.app.post(
            url_for('insert', class_='reference_system'),
            follow_redirects=True,
            data={'name': 'GeoNames'})
        assert b'A transaction error occurred' in rv.data
        rv = self.app.get(
            url_for('index', view='reference_system', delete_id=geonames.id))
        assert b'403' in rv.data
        rv = self.app.post(
            url_for('insert', class_='person'),
            data={
                'name': 'Actor with Wikidata but without precision',
                'reference_system_id_' + str(wikidata.id): 'Q123',
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        assert b'required' in rv.data
        rv = self.app.post(
            url_for('insert', class_='person'),
            data={
                'name': 'Actor with invalid Wikidata id',
                'reference_system_id_' + str(wikidata.id): 'invalid id',
                self.precision_geonames: '',
                self.precision_wikidata: precision_id})
        assert b'Wrong id format' in rv.data
        rv = self.app.post(
            url_for('insert', class_='place'),
            data={
                'name': 'Reference test',
                'reference_system_id_' + str(geonames.id): 'invalid id',
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        assert b'Wrong id format' in rv.data
        rv = self.app.get(
            url_for(
                'reference_system_remove_form',
                system_id=geonames.id,
                form_id=geonames.forms[0]),
            follow_redirects=True)
        assert b'Changes have been saved' in rv.data
        rv = self.app.get(
            url_for('index', view='reference_system', delete_id=geonames.id))
        assert b'403 - Forbidden' in rv.data

def test_involvement(self) -> None:
    with app.app_context():  # type: ignore
        rv = self.app.post(
            url_for('insert', class_='acquisition'),
            data={
                'name': 'Event Horizon',
                'begin_year_from': '949',
                'begin_month_from': '10',
                'begin_day_from': '8',
                'end_year_from': '1951',
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        event_id = int(rv.location.split('/')[-1])
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor = Entity.insert('person', 'Captain Miller')
            involvement = Node.get_hierarchy('Involvement')

        # Add involvement
        rv = self.app.get(url_for('involvement_insert', origin_id=actor.id))
        assert b'Involvement' in rv.data
        rv = self.app.post(
            url_for('involvement_insert', origin_id=actor.id),
            data={
                'event': str([event_id]),
                'activity': 'P11',
                'begin_year_from': '950',
                'end_year_from': '1950',
                involvement.id: involvement.id},
            follow_redirects=True)
        assert b'Event Horizon' in rv.data
        rv = self.app.post(
            url_for('involvement_insert', origin_id=event_id),
            data={
                'actor': str([actor.id]),
                'continue_': 'yes',
                'activity': 'P22'},
            follow_redirects=True)
        assert b'Event Horizon' in rv.data
        rv = self.app.get(url_for('entity_view', id_=event_id))
        assert b'Event Horizon' in rv.data
        rv = self.app.get(url_for('entity_view', id_=actor.id))
        assert b'Appears first' in rv.data

        # Update involvement
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            link_id = Link.get_links(event_id, 'P22')[0].id
        rv = self.app.get(
            url_for('involvement_update', id_=link_id, origin_id=event_id))
        assert b'Captain' in rv.data
        rv = self.app.post(
            url_for('involvement_update', id_=link_id, origin_id=actor.id),
            data={
                'description': 'Infinite Space - Infinite Terror',
                'activity': 'P23'},
            follow_redirects=True)
        assert b'Infinite Space - Infinite Terror' in rv.data
        rv = self.app.get(url_for('entity_view', id_=actor.id))
        assert b'Appears first' in rv.data
        rv = self.app.get(url_for('entity_view', id_=event_id))
        assert b'Infinite Space - Infinite Terror' in rv.data

def test_node(self) -> None:
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor_node = Node.get_hierarchy('Actor actor relation')
            dimension_node = Node.get_hierarchy('Dimensions')
            sex_node = Node.get_hierarchy('Sex')
        rv = self.app.get(url_for('node_index'))
        assert b'Actor actor relation' in rv.data
        rv = self.app.get(
            url_for('insert', class_='type', origin_id=actor_node.id))
        assert b'Actor actor relation' in rv.data
        rv = self.app.post(
            url_for('insert', class_='type', origin_id=actor_node.id),
            data={'name_search': 'new'})
        assert b'Inverse' in rv.data
        data = {
            'name': 'My secret node',
            'name_inverse': 'Do I look inverse?',
            'description': 'Very important!'}
        rv = self.app.post(
            url_for('insert', class_='type', origin_id=actor_node.id),
            data=data)
        node_id = rv.location.split('/')[-1]
        rv = self.app.get(url_for('update', id_=node_id))
        assert b'My secret node' in rv.data and b'Super' in rv.data
        self.app.post(
            url_for('insert', class_='type', origin_id=sex_node.id),
            data=data)
        rv = self.app.post(
            url_for('update', id_=node_id), data=data, follow_redirects=True)
        assert b'Changes have been saved.' in rv.data

        # Insert and continue
        data['continue_'] = 'yes'
        rv = self.app.post(
            url_for('insert', class_='type', origin_id=actor_node.id),
            data=data,
            follow_redirects=True)
        assert b'An entry has been created' in rv.data
        data['continue_'] = ''

        # Forbidden system node
        rv = self.app.post(
            url_for('update', id_=actor_node.id),
            data=data,
            follow_redirects=True)
        assert b'Forbidden' in rv.data

        # Update with self as root
        data[str(actor_node.id)] = node_id
        rv = self.app.post(
            url_for('update', id_=node_id), data=data, follow_redirects=True)
        assert b"Type can't have itself as super." in rv.data

        # Update with sub as root
        rv = self.app.post(
            url_for('insert', class_='type', origin_id=actor_node.id),
            data=data)
        sub_node_id = rv.location.split('/')[-1].replace('node#tab-', '')
        data[str(actor_node.id)] = sub_node_id
        rv = self.app.post(
            url_for('update', id_=node_id), data=data, follow_redirects=True)
        assert b"Type can't have a sub as super." in rv.data

        # Custom type
        rv = self.app.get(
            url_for('entity_view', id_=sex_node.id), follow_redirects=True)
        assert b'Male' in rv.data

        # Administrative unit
        rv = self.app.get(
            url_for(
                'entity_view',
                id_=Node.get_hierarchy('Administrative unit').id),
            follow_redirects=True)
        assert b'Austria' in rv.data

        # Value type
        rv = self.app.get(
            url_for('entity_view', id_=dimension_node.id),
            follow_redirects=True)
        assert b'Height' in rv.data
        rv = self.app.get(url_for('entity_view', id_=dimension_node.subs[0]))
        assert b'Unit' in rv.data
        rv = self.app.get(url_for('update', id_=dimension_node.subs[0]))
        assert b'Dimensions' in rv.data

        # Untyped entities
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor = Entity.insert('person', 'Connor MacLeod')
        rv = self.app.get(url_for('show_untyped_entities', id_=sex_node.id))
        assert b'Connor MacLeod' in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor.link('P2', g.nodes[sex_node.subs[0]])
        rv = self.app.get(url_for('show_untyped_entities', id_=sex_node.id))
        assert b'No entries' in rv.data

        # Delete
        rv = self.app.get(
            url_for('node_delete', id_=actor_node.id), follow_redirects=True)
        assert b'Forbidden' in rv.data
        rv = self.app.get(
            url_for('node_delete', id_=sub_node_id), follow_redirects=True)
        assert b'The entry has been deleted.' in rv.data

def test_relation(self) -> None:
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor = Entity.insert('person', 'Connor MacLeod')
            related = Entity.insert('person', 'The Kurgan')

        # Add relationship
        rv = self.app.get(url_for('relation_insert', origin_id=actor.id))
        assert b'Actor actor relation' in rv.data
        relation_id = Node.get_hierarchy('Actor actor relation').id
        relation_sub_id = g.nodes[relation_id].subs[0]
        relation_sub_id2 = g.nodes[relation_id].subs[1]
        data = {
            'actor': str([related.id]),
            relation_id: relation_sub_id,
            'inverse': None,
            'begin_year_from': '-1949',
            'begin_month_from': '10',
            'begin_day_from': '8',
            'begin_year_to': '-1948',
            'end_year_from': '2049',
            'end_year_to': '2050'}
        rv = self.app.post(
            url_for('relation_insert', origin_id=actor.id),
            data=data,
            follow_redirects=True)
        assert b'The Kurgan' in rv.data
        rv = self.app.get(url_for('entity_view', id_=relation_sub_id))
        assert b'Connor' in rv.data
        data['continue_'] = 'yes'
        data['inverse'] = True
        rv = self.app.post(
            url_for('relation_insert', origin_id=actor.id),
            data=data,
            follow_redirects=True)
        assert b'The Kurgan' in rv.data
        rv = self.app.get(url_for('entity_view', id_=actor.id))
        assert b'The Kurgan' in rv.data
        rv = self.app.post(
            url_for('relation_insert', origin_id=related.id),
            data=data,
            follow_redirects=True)
        assert b"link to itself" in rv.data

        # Relation types
        rv = self.app.get(url_for('node_move_entities', id_=relation_sub_id))
        assert b'The Kurgan' in rv.data

        # Update relationship
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            link_id = Link.get_links(actor.id, 'OA7')[0].id
            link_id2 = Link.get_links(actor.id, 'OA7', True)[0].id
        rv = self.app.post(
            url_for('node_move_entities', id_=relation_sub_id),
            follow_redirects=True,
            data={
                relation_id: relation_sub_id2,
                'selection': [link_id],
                'checkbox_values': str([link_id])})
        assert b'Entities were updated' in rv.data
        rv = self.app.post(
            url_for('node_move_entities', id_=relation_sub_id2),
            data={
                relation_id: '',
                'selection': [link_id],
                'checkbox_values': str([link_id])},
            follow_redirects=True)
        assert b'Entities were updated' in rv.data
        rv = self.app.get(
            url_for('relation_update', id_=link_id, origin_id=related.id))
        assert b'Connor' in rv.data
        rv = self.app.post(
            url_for('relation_update', id_=link_id, origin_id=actor.id),
            data={
                'description': 'There can be only one!',
                'inverse': True},
            follow_redirects=True)
        assert b'only one' in rv.data
        rv = self.app.post(
            url_for('relation_update', id_=link_id2, origin_id=actor.id),
            data={
                'description': 'There can be only one!',
                'inverse': None},
            follow_redirects=True)
        assert b'only one' in rv.data

def test_image(self) -> None:
    app.config['IMAGE_SIZE']['tmp'] = '1'
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            place = insert_entity(
                'Nostromos', 'place', description='That is the Nostromos')
        logo = \
            pathlib.Path(app.root_path) \
            / 'static' / 'images' / 'layout' / 'logo.png'

        # Resizing through UI insert
        with open(logo, 'rb') as img:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=place.id),
                data={'name': 'OpenAtlas logo', 'file': img},
                follow_redirects=True)
        assert b'An entry has been created' in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            files = Entity.get_by_class('file')
            file_id = files[0].id

        # Set and unset as main image
        self.app.get(
            url_for('set_profile_image', id_=file_id, origin_id=place.id),
            follow_redirects=True)

        # Delete through UI
        rv = self.app.get(url_for('index', view='file', delete_id=file_id))
        assert b'The entry has been deleted' in rv.data

        # Create entities for file
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            file_pathless = insert_entity('Pathless_File', 'file')
            file = insert_entity('Test_File', 'file')
            file.link('P2', g.nodes[Node.get_hierarchy('License').subs[0]])
            file_name = f'{file.id}.jpeg'
            src_png = \
                pathlib.Path(app.root_path) \
                / 'static' / 'images' / 'layout' / 'logo.png'
            dst_png = pathlib.Path(app.config['UPLOAD_DIR'] / file_name)
            copyfile(src_png, dst_png)
            file2 = insert_entity('Test_File2', 'file')
            file2.link('P2', g.nodes[Node.get_hierarchy('License').subs[0]])
            file2_name = f'{file2.id}.jpeg'
            src2_png = \
                pathlib.Path(app.root_path) \
                / 'static' / 'images' / 'layout' / 'logo.png'
            dst2_png = pathlib.Path(app.config['UPLOAD_DIR'] / file2_name)
            copyfile(src2_png, dst2_png)
            file_py = insert_entity('Test_Py', 'file')
            file_name_py = f'{file_py.id}.py'
            src_py = pathlib.Path(app.root_path) / 'views' / 'index.py'
            dst_py = pathlib.Path(app.config['UPLOAD_DIR'] / file_name_py)
            copyfile(src_py, dst_py)

            # Exception
            ImageProcessing.safe_resize_image(file2.id, '.png', size="???")
            display_profile_image(file_pathless)

        # Resizing images (don't change order!)
        rv = self.app.get(url_for('entity_view', id_=file.id))
        assert b'Test_File' in rv.data
        rv = self.app.get(url_for('entity_view', id_=file_py.id))
        assert b'No preview available' in rv.data
        rv = self.app.get(url_for('entity_view', id_=file_pathless.id))
        assert b'Missing file' in rv.data
        rv = self.app.get(url_for('index', view='file'))
        assert b'Test_File' in rv.data

        # Display file
        rv = self.app.get(url_for('display_file', filename=file_name))
        assert b'\xff' in rv.data
        rv = self.app.get(
            url_for(
                'display_file',
                filename=file_name,
                size=app.config['IMAGE_SIZE']['thumbnail']))
        assert b'\xff' in rv.data
        rv = self.app.get(
            url_for(
                'display_file',
                filename=file_name,
                size=app.config['IMAGE_SIZE']['table']))
        assert b'\xff' in rv.data
        rv = self.app.get(
            url_for(
                'display_file',
                filename=file_name_py,
                size=app.config['IMAGE_SIZE']['table']))
        assert b'404' in rv.data

        # Make directory if not exist
        rv = self.app.get(url_for('entity_view', id_=file.id))
        assert b'Test_File' in rv.data

        # Exception
        app.config['IMAGE_SIZE']['tmp'] = '<'
        rv = self.app.get(url_for('entity_view', id_=file.id))
        assert b'Test_File' in rv.data
        app.config['IMAGE_SIZE']['tmp'] = '1'
        rv = self.app.get(
            url_for('admin_resize_images'), follow_redirects=True)
        assert b'Images were created' in rv.data
        rv = self.app.get(
            url_for('admin_delete_orphaned_resized_images'),
            follow_redirects=True)
        assert b'Resized orphaned images were deleted' in rv.data
        rv = self.app.get(url_for('index', view='file', delete_id=file.id))
        assert b'The entry has been deleted' in rv.data
        rv = self.app.get(url_for('index', view='file', delete_id=file2.id))
        assert b'The entry has been deleted' in rv.data
        shutil.rmtree(
            pathlib.Path(
                app.config['RESIZED_IMAGES']
                / app.config['IMAGE_SIZE']['tmp']))
        dst_py.unlink()
        del app.config['IMAGE_SIZE']['tmp']

def add_fields(
        form: Any,
        class_: str,
        code: Union[str, None],
        item: Union[Entity, Node, Link, None],
        origin: Union[Entity, Node, None]) -> None:
    if class_ == 'actor_actor_relation':
        setattr(form, 'inverse', BooleanField(_('inverse')))
        if not item:
            setattr(
                form, 'actor', TableMultiField(_('actor'), [InputRequired()]))
            setattr(form, 'relation_origin_id', HiddenField())
    elif class_ in ['activity', 'acquisition', 'move']:
        setattr(form, 'event_id', HiddenField())
        setattr(form, 'event', TableField(_('sub event of')))
        if class_ == 'activity':
            setattr(form, 'place', TableField(_('location')))
        if class_ == 'acquisition':
            setattr(form, 'place', TableField(_('location')))
            setattr(form, 'given_place', TableMultiField(_('given place')))
        elif class_ == 'move':
            setattr(form, 'place_from', TableField(_('from')))
            setattr(form, 'place_to', TableField(_('to')))
            setattr(form, 'artifact', TableMultiField())
            setattr(form, 'person', TableMultiField())
    elif class_ == 'file' and not item:
        setattr(form, 'file', FileField(_('file'), [InputRequired()]))
    elif class_ == 'group':
        setattr(form, 'residence', TableField(_('residence')))
        setattr(form, 'begins_in', TableField(_('begins in')))
        setattr(form, 'ends_in', TableField(_('ends in')))
    elif class_ == 'hierarchy':
        if code == 'custom' or (item and not item.value_type):
            setattr(
                form,
                'multiple',
                BooleanField(
                    _('multiple'),
                    description=_('tooltip hierarchy multiple')))
        setattr(
            form,
            'forms',
            SelectMultipleField(
                _('classes'),
                render_kw={'disabled': True},
                description=_('tooltip hierarchy forms'),
                choices=[],
                option_widget=widgets.CheckboxInput(),
                widget=widgets.ListWidget(prefix_label=False),
                coerce=int))
    elif class_ == 'involvement':
        if not item and origin:
            involved_with = \
                'actor' if origin.class_.view == 'event' else 'event'
            setattr(
                form,
                involved_with,
                TableMultiField(_(involved_with), [InputRequired()]))
        setattr(form, 'activity', SelectField(_('activity')))
    elif class_ == 'member' and not item:
        setattr(form, 'member_origin_id', HiddenField())
        setattr(
            form,
            'actor' if code == 'member' else 'group',
            TableMultiField(_('actor'), [InputRequired()]))
    elif class_ in g.classes and g.classes[class_].view == 'type':
        setattr(form, 'is_node_form', HiddenField())
        node = item if item else origin
        root = g.nodes[node.root[-1]] if node.root else node
        setattr(form, str(root.id), TreeField(str(root.id)))
        if root.directional:
            setattr(form, 'name_inverse', StringField(_('inverse')))
    elif class_ == 'person':
        setattr(form, 'residence', TableField(_('residence')))
        setattr(form, 'begins_in', TableField(_('born in')))
        setattr(form, 'ends_in', TableField(_('died in')))
    elif class_ == 'reference_system':
        setattr(
            form,
            'website_url',
            StringField(
                _('website URL'),
                validators=[OptionalValidator(), URL()]))
        setattr(
            form,
            'resolver_url',
            StringField(
                _('resolver URL'),
                validators=[OptionalValidator(), URL()]))
        setattr(form, 'placeholder', StringField(_('example ID')))
        precision_node_id = str(
            Node.get_hierarchy('External reference match').id)
        setattr(form, precision_node_id, TreeField(precision_node_id))
        choices = ReferenceSystem.get_form_choices(item)
        if choices:
            setattr(
                form,
                'forms',
                SelectMultipleField(
                    _('forms'),
                    render_kw={'disabled': True},
                    choices=choices,
                    option_widget=widgets.CheckboxInput(),
                    widget=widgets.ListWidget(prefix_label=False),
                    coerce=int))
    elif class_ == 'source':
        setattr(form, 'artifact', TableMultiField())

def test_hierarchy(self) -> None:
    with app.app_context():  # type: ignore
        # Custom types
        data = {
            'name': 'Geronimo',
            'forms': [1, 2, 5, 6, 7, 8],
            'multiple': True,
            'description': 'Very important!'}
        rv = self.app.post(
            url_for('hierarchy_insert', param='custom'),
            follow_redirects=True,
            data=data)
        assert b'An entry has been created' in rv.data
        rv = self.app.post(
            url_for('hierarchy_insert', param='custom'),
            follow_redirects=True,
            data=data)
        assert b'The name is already in use' in rv.data
        with app.test_request_context():
            hierarchy = Node.get_hierarchy('Geronimo')
        rv = self.app.get(url_for('hierarchy_update', id_=hierarchy.id))
        assert b'Geronimo' in rv.data
        data['forms'] = [4]
        rv = self.app.post(
            url_for('hierarchy_update', id_=hierarchy.id),
            data=data,
            follow_redirects=True)
        assert b'Changes have been saved.' in rv.data
        rv = self.app.get(url_for('hierarchy_insert', param='custom'))
        assert b'+ Custom' in rv.data
        data = {'name': 'My secret node', 'description': 'Very important!'}
        rv = self.app.post(
            url_for('insert', class_='type', origin_id=hierarchy.id),
            data=data)
        node_id = rv.location.split('/')[-1]
        rv = self.app.get(
            url_for('hierarchy_remove_form', id_=hierarchy.id, form_id=5),
            follow_redirects=True)
        assert b'Changes have been saved.' in rv.data
        rv = self.app.get(
            url_for('node_delete', id_=node_id), follow_redirects=True)
        assert b'deleted' in rv.data
        rv = self.app.post(
            url_for('hierarchy_update', id_=hierarchy.id),
            data={'name': 'Actor actor relation'},
            follow_redirects=True)
        assert b'The name is already in use' in rv.data
        rv = self.app.post(
            url_for('hierarchy_delete', id_=hierarchy.id),
            follow_redirects=True)
        assert b'deleted' in rv.data

        # Value types
        rv = self.app.get(url_for('hierarchy_insert', param='value'))
        assert b'+ Value' in rv.data
        rv = self.app.post(
            url_for('hierarchy_insert', param='value'),
            follow_redirects=True,
            data={'name': 'A valued value', 'forms': [1], 'description': ''})
        assert b'An entry has been created' in rv.data
        with app.test_request_context():
            value_node = Node.get_hierarchy('A valued value')
        rv = self.app.get(url_for('hierarchy_update', id_=value_node.id))
        assert b'valued' in rv.data

        # Test checks
        actor_node = Node.get_hierarchy('Actor actor relation')
        rv = self.app.get(
            url_for('hierarchy_update', id_=actor_node.id),
            follow_redirects=True)
        assert b'Forbidden' in rv.data
        rv = self.app.get(
            url_for('hierarchy_delete', id_=actor_node.id),
            follow_redirects=True)
        assert b'Forbidden' in rv.data

def test_event(self) -> None:
    with app.app_context():  # type: ignore
        # Create entities for event
        place_name = 'Lewis and Clark'
        rv = self.app.post(
            url_for('insert', class_='place'),
            data={
                'name': place_name,
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        residence_id = rv.location.split('/')[-1]
        actor_name = 'Captain Miller'
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            actor = Entity.insert('person', actor_name)
            file = Entity.insert('file', 'X-Files')
            source = Entity.insert('source', 'Necronomicon')
            carrier = Entity.insert('artifact', 'Artifact')
            reference = Entity.insert(
                'external_reference', 'https://openatlas.eu')

        # Insert
        rv = self.app.get(url_for('insert', class_='activity'))
        assert b'+ Activity' in rv.data
        data = {
            'name': 'Event Horizon',
            'place': residence_id,
            self.precision_wikidata: ''}
        rv = self.app.post(
            url_for('insert', class_='activity', origin_id=reference.id),
            data=data,
            follow_redirects=True)
        assert bytes('Event Horizon', 'utf-8') in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            activity_id = Entity.get_by_view('event')[0].id
        self.app.post(
            url_for('insert', class_='activity', origin_id=actor.id),
            data=data)
        self.app.post(
            url_for('insert', class_='activity', origin_id=file.id),
            data=data)
        self.app.post(
            url_for('insert', class_='activity', origin_id=source.id),
            data=data)
        rv = self.app.get(
            url_for('insert', class_='activity', origin_id=residence_id))
        assert b'Location' in rv.data
        rv = self.app.get(
            url_for('insert', class_='move', origin_id=residence_id))
        assert b'Location' not in rv.data

        # Acquisition
        event_name2 = 'Second event'
        wikidata = \
            f"reference_system_id_" \
            f"{ReferenceSystem.get_by_name('Wikidata').id}"
        precision = Node.get_hierarchy('External reference match').subs[0]
        rv = self.app.post(
            url_for('insert', class_='acquisition'),
            data={
                'name': event_name2,
                'given_place': [residence_id],
                'place': residence_id,
                'event': activity_id,
                'begin_year_from': '1949',
                'begin_month_from': '10',
                'begin_day_from': '8',
                'end_year_from': '1951',
                wikidata: 'Q123',
                self.precision_wikidata: precision})
        event_id = rv.location.split('/')[-1]
        rv = self.app.get(url_for('entity_view', id_=event_id))
        assert b'Event Horizon' in rv.data

        # Move
        rv = self.app.post(
            url_for('insert', class_='move'),
            data={
                'name': 'Keep it moving',
                'place_to': residence_id,
                'place_from': residence_id,
                'artifact': carrier.id,
                'person': actor.id,
                self.precision_wikidata: ''})
        move_id = rv.location.split('/')[-1]
        rv = self.app.get(url_for('entity_view', id_=move_id))
        assert b'Keep it moving' in rv.data
        rv = self.app.get(url_for('entity_view', id_=carrier.id))
        assert b'Keep it moving' in rv.data
        rv = self.app.get(url_for('update', id_=move_id))
        assert b'Keep it moving' in rv.data

        # Add another event and test if events are seen at place
        event_name3 = 'Third event'
        self.app.post(
            url_for('insert', class_='acquisition'),
            data={
                'name': event_name3,
                'given_place': [residence_id],
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        rv = self.app.get(url_for('entity_view', id_=residence_id))
        assert bytes(place_name, 'utf-8') in rv.data
        rv = self.app.get(url_for('entity_view', id_=actor.id))
        assert bytes(actor_name, 'utf-8') in rv.data
        rv = self.app.post(
            url_for('insert', class_='acquisition'),
            follow_redirects=True,
            data={
                'name': 'Event Horizon',
                'continue_': 'yes',
                self.precision_geonames: '',
                self.precision_wikidata: ''})
        assert b'An entry has been created' in rv.data
        rv = self.app.get(url_for('index', view='event'))
        assert b'Event' in rv.data
        self.app.get(url_for('entity_view', id_=activity_id))

        # Add to event
        rv = self.app.get(url_for('entity_add_file', id_=event_id))
        assert b'Link file' in rv.data
        rv = self.app.post(
            url_for('entity_add_file', id_=event_id),
            data={'checkbox_values': str([file.id])},
            follow_redirects=True)
        assert b'X-Files' in rv.data
        rv = self.app.get(url_for('entity_add_reference', id_=event_id))
        assert b'Link reference' in rv.data
        rv = self.app.post(
            url_for('entity_add_reference', id_=event_id),
            data={'reference': reference.id, 'page': '777'},
            follow_redirects=True)
        assert b'777' in rv.data

        # Update
        rv = self.app.get(url_for('update', id_=activity_id))
        assert b'Event Horizon' in rv.data
        rv = self.app.get(url_for('update', id_=event_id))
        assert b'Event Horizon' in rv.data
        data['name'] = 'Event updated'
        rv = self.app.post(
            url_for('update', id_=event_id), data=data, follow_redirects=True)
        assert b'Changes have been saved' in rv.data

        # Test super event validation
        data = {'name': 'Event Horizon', 'event': event_id}
        rv = self.app.post(
            url_for('update', id_=event_id), data=data, follow_redirects=True)
        assert b'error' in rv.data

        # Delete
        rv = self.app.get(url_for('index', view='event', delete_id=event_id))
        assert b'The entry has been deleted.' in rv.data

def test_api(self) -> None:
    with app.app_context():  # type: ignore
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            place = insert_entity(
                'Nostromos', 'place', description='That is the Nostromos')
            if not place:
                return  # pragma: no cover

            # Adding Dates to place
            place.begin_from = '2018-01-31'
            place.begin_to = '2018-03-01'
            place.begin_comment = 'Begin of the Nostromos'
            place.end_from = '2019-01-31'
            place.end_to = '2019-03-01'
            place.end_comment = 'Destruction of the Nostromos'
            location = place.get_linked_entity_safe('P53')
            Gis.add_example_geom(location)

            # Adding Type Settlement
            place.link('P2', Node.get_hierarchy('Place'))

            # Adding Alias
            alias = insert_entity('Cargo hauler', 'appellation')
            place.link('P1', alias)

            # Adding External Reference
            external_reference = insert_entity(
                'https://openatlas.eu', 'external_reference')
            external_reference.link(
                'P67', place, description='OpenAtlas Website')

            # Adding feature to place
            feature = insert_entity('Feature', 'feature', place)

            # Adding stratigraphic to place
            strati = insert_entity('Strato', 'stratigraphic_unit', feature)

            # Adding Administrative Unit Node
            unit_node = Node.get_hierarchy('Administrative unit')

            # Adding File to place
            file = insert_entity('Datei', 'file')
            file.link('P67', place)
            file.link('P2', Node.get_hierarchy('License'))

            # Adding Value Type
            value_type = Node.get_hierarchy('Dimensions')
            place.link('P2', Entity.get_by_id(value_type.subs[0]), '23.0')

            # Adding Geonames
            geonames = Entity.get_by_id(
                ReferenceSystem.get_by_name('GeoNames').id)
            precision_id = Node.get_hierarchy(
                'External reference match').subs[0]
            geonames.link(
                'P67', place, description='2761369', type_id=precision_id)

        # Path Tests
        rv = self.app.get(url_for('usage'))
        assert b'message' in rv.data
        rv = self.app.get(url_for('latest', latest=10))
        assert b'Datei' in rv.data
        rv = self.app.get(url_for('latest', count=True, latest=1))
        assert b'1' in rv.data
        rv = self.app.get(url_for('entity', id_=place.id))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('code', code='reference'))
        assert b'openatlas' in rv.data
        rv = self.app.get(
            url_for('system_class', system_class='appellation'))
        assert b'Cargo hauler' in rv.data
        rv = self.app.get(url_for('class', class_code='E31'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(url_for('node_entities', id_=unit_node.id))
        assert b'Austria' in rv.data
        rv = self.app.get(url_for('node_entities_all', id_=unit_node.id))
        assert b'Austria' in rv.data
        rv = self.app.get(
            url_for('query', entities=place.id, classes='E18', items='place'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('content', lang='de'))
        assert b'intro' in rv.data
        rv = self.app.get(url_for('overview_count'))
        assert b'systemClass' in rv.data
        rv = self.app.get(url_for('class_mapping'))
        assert b'systemClass' in rv.data
        rv = self.app.get(url_for('node_overview'))
        assert b'Actor' in rv.data
        rv = self.app.get(url_for('type_tree'))
        assert b'type_tree' in rv.data

        # Path test with download
        rv = self.app.get(url_for('entity', id_=place.id, download=True))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('latest', latest=1, download=True))
        assert b'Datei' in rv.data
        rv = self.app.get(url_for('code', code='reference', download=True))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('system_class', system_class='appellation',
                    download=True))
        assert b'Cargo hauler' in rv.data
        rv = self.app.get(
            url_for('class', class_code='E31', download=True))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(
            url_for('node_entities', id_=unit_node.id, download=True))
        assert b'Austria' in rv.data
        rv = self.app.get(
            url_for('node_entities_all', id_=unit_node.id, download=True))
        assert b'Austria' in rv.data
        rv = self.app.get(url_for('query', classes='E31', download=True))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(url_for('content', lang='de', download=True))
        assert b'intro' in rv.data
        rv = self.app.get(url_for('overview_count', download=True))
        assert b'systemClass' in rv.data
        rv = self.app.get(url_for('class_mapping', download=True))
        assert b'systemClass' in rv.data
        rv = self.app.get(url_for('node_overview', download=True))
        assert b'Actor' in rv.data

        # Path with export
        rv = self.app.get(url_for('entity', id_=place.id, export='csv'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('class', class_code='E18', export='csv'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('system_class', system_class='place', export='csv'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('code', code='reference', export='csv'))
        assert b'https://openatlas.eu' in rv.data

        # Testing Subunit
        rv = self.app.get(url_for('subunit', id_=place.id))
        assert b'Feature' in rv.data and b'Strato' not in rv.data
        rv = self.app.get(url_for('subunit', id_=place.id, download=True))
        assert b'Feature' in rv.data and b'Strato' not in rv.data
        rv = self.app.get(url_for('subunit', id_=place.id, count=True))
        assert b'1' in rv.data
        rv = self.app.get(url_for('subunit_hierarchy', id_=place.id))
        assert b'Strato' in rv.data
        rv = self.app.get(
            url_for('subunit_hierarchy', id_=place.id, download=True))
        assert b'Strato' in rv.data
        rv = self.app.get(
            url_for('subunit_hierarchy', id_=place.id, count=True))
        assert b'2' in rv.data

        # Parameter: filter
        rv = self.app.get(
            url_for('code', code='place', limit=10, sort='desc',
                    column='name', filter='or|name|like|Nostromos'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('code', code='reference'))
        assert b'openatlas' in rv.data
        rv = self.app.get(
            url_for('class', class_code='E18', filter='or|name|like|Nostr'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('code', code='place',
                    filter='or|id|eq|' + str(place.id)))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('code', code='place',
                    filter='or|begin_from|ge|2018-1-1'))
        assert b'Nostromos' in rv.data

        # Parameter: last
        rv = self.app.get(url_for('class', class_code='E18', last=place.id))
        assert b'entities' in rv.data

        # Parameter: first
        rv = self.app.get(url_for('class', class_code='E18', first=place.id))
        assert b'entities' in rv.data

        # Parameter: show
        rv = self.app.get(url_for('class', class_code='E31', show='types'))
        assert b'https://openatlas.eu' in rv.data
        rv = self.app.get(url_for('class', class_code='E18', show='when'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(url_for('class', class_code='E31', show='none'))
        assert b'https://openatlas.eu' in rv.data

        # Parameter: count
        rv = self.app.get(url_for('class', class_code='E31', count=True))
        assert b'2' in rv.data
        rv = self.app.get(url_for('code', code='place', count=True))
        assert b'3' in rv.data
        rv = self.app.get(
            url_for('system_class', system_class='appellation', count=True))
        assert b'1' in rv.data
        rv = self.app.get(
            url_for('query', entities=place.id, classes='E18',
                    codes='place'))
        assert b'Nostromos' in rv.data
        rv = self.app.get(
            url_for('query', entities=place.id, classes='E18',
                    codes='place', count=True))
        assert b'7' in rv.data
        rv = self.app.get(
            url_for('node_entities', id_=unit_node.id, count=True))
        assert b'6' in rv.data
        rv = self.app.get(
            url_for('node_entities_all', id_=unit_node.id, count=True))
        assert b'8' in rv.data

def test_place(self) -> None: with app.app_context(): # type: ignore rv = self.app.get(url_for('insert', class_='place')) assert b'+ Place' in rv.data with app.test_request_context(): app.preprocess_request() # type: ignore unit_node = Node.get_hierarchy('Administrative unit') unit_sub1 = g.nodes[unit_node.subs[0]] unit_sub2 = g.nodes[unit_node.subs[1]] reference = Entity.insert('external_reference', 'https://openatlas.eu') place_node = Node.get_hierarchy('Place') source = Entity.insert('source', 'Necronomicon') geonames = \ f"reference_system_id_" \ f"{ReferenceSystem.get_by_name('GeoNames').id}" precision = Node.get_hierarchy('External reference match').subs[0] data = { 'name': 'Asgard', 'alias-0': 'Valhöll', unit_node.id: str([unit_sub1.id, unit_sub2.id]), geonames: '123456', self.precision_geonames: precision, self.precision_wikidata: '' } rv = self.app.post(url_for('insert', class_='place', origin_id=reference.id), data=data, follow_redirects=True) assert b'Asgard' in rv.data \ and b'An entry has been created' in rv.data rv = self.app.get(url_for('entity_view', id_=precision)) assert b'Asgard' in rv.data rv = self.app.get( url_for('entity_view', id_=ReferenceSystem.get_by_name('GeoNames').id)) assert b'Asgard' in rv.data data['gis_points'] = """[{ "type": "Feature", "geometry": {"type":"Point","coordinates":[9,17]}, "properties": { "name": "Valhalla", "description": "", "shapeType": "centerpoint"}}]""" data['gis_lines'] = """[{ "type": "Feature", "geometry":{ "type": "LineString", "coordinates": [ [9.75307425847859,17.8111792731339], [9.75315472474904,17.8110005175436], [9.75333711496205,17.8110873417098]]}, "properties": { "name": "", "description": "", "shapeType": "line"}}]""" data['gis_polygons'] = """[{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[ [9.75307425847859,17.8111792731339], [9.75315472474904,17.8110005175436], [9.75333711496205,17.8110873417098], [9.75307425847859,17.8111792731339]]]}, "properties":{ "name": "", "description": "", "shapeType": "shape"}}]""" data[place_node.id] = place_node.subs data['continue_'] = 'yes' rv = self.app.post(url_for('insert', class_='place', origin_id=source.id), data=data, follow_redirects=True) assert b'Necronomicon' in rv.data with app.test_request_context(): app.preprocess_request() # type: ignore places = Entity.get_by_class('place') place = places[0] place2 = places[1] location = place2.get_linked_entity_safe('P53') actor = Entity.insert('person', 'Milla Jovovich') actor.link('P74', location) assert b'Necronomicon' in rv.data rv = self.app.get(url_for('index', view='place')) assert b'Asgard' in rv.data rv = self.app.get(url_for('update', id_=place.id)) assert b'Valhalla' in rv.data data['continue_'] = '' data['alias-1'] = 'Val-hall' data['geonames_id'] = '321' rv = self.app.post(url_for('update', id_=place.id), data=data, follow_redirects=True) assert b'Val-hall' in rv.data # Test error when viewing the corresponding location rv = self.app.get(url_for('entity_view', id_=place.id + 1)) assert b'be viewed directly' in rv.data # Test with same GeoNames id rv = self.app.post(url_for('update', id_=place.id), data=data, follow_redirects=True) assert b'Val-hall' in rv.data # Test with same GeoNames id but different precision data['geonames_precision'] = '' rv = self.app.post(url_for('update', id_=place.id), data=data, follow_redirects=True) assert b'Val-hall' in rv.data # Test update without the previous GeoNames id data['geonames_id'] = '' rv = self.app.post(url_for('update', id_=place.id), data=data, 
        rv = self.app.post(
            url_for('update', id_=place.id),
            data=data,
            follow_redirects=True)
        assert b'Val-hall' in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            event = Entity.insert('acquisition', 'Valhalla rising')
            event.link('P7', location)
            event.link('P24', location)
        rv = self.app.get(url_for('entity_view', id_=place2.id))
        assert rv.data and b'Valhalla rising' in rv.data

        # Test invalid geometry
        data['gis_polygons'] = """[{
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [[
                    [298.9893436362036, -5.888919049309554],
                    [299.00444983737543, -5.9138487869408545],
                    [299.00650977389887, -5.893358673645309],
                    [298.9848804404028, -5.9070188333813585],
                    [298.9893436362036, -5.888919049309554]]]},
            "properties": {
                "name": "",
                "description": "",
                "shapeType": "shape"}}]"""
        rv = self.app.post(
            url_for('insert', class_='place', origin_id=source.id),
            data=data,
            follow_redirects=True)
        assert b'An invalid geometry was entered' in rv.data

        # Test overlays
        path = pathlib.Path(app.root_path) \
            / 'static' / 'images' / 'layout' / 'logo.png'
        with open(path, 'rb') as img:
            rv = self.app.post(
                url_for('insert', class_='file', origin_id=place.id),
                data={'name': 'X-Files', 'file': img},
                follow_redirects=True)
        assert b'An entry has been created' in rv.data
        with app.test_request_context():
            app.preprocess_request()  # type: ignore
            file = Entity.get_by_class('file')[0]
            link_id = Link.insert(file, 'P67', place)[0]
        rv = self.app.get(
            url_for(
                'overlay_insert',
                image_id=file.id,
                place_id=place.id,
                link_id=link_id))
        assert b'X-Files' in rv.data
        data = {
            'top_left_easting': 42,
            'top_left_northing': 12,
            'top_right_easting': 43,
            'top_right_northing': 13,
            'bottom_left_easting': 10,
            'bottom_left_northing': 20}
        rv = self.app.post(
            url_for(
                'overlay_insert',
                image_id=file.id,
                place_id=place.id,
                link_id=link_id),
            data=data,
            follow_redirects=True)
        assert b'Edit' in rv.data
        if os.name == "posix":  # Ignore for other OS, e.g. Windows
            with app.test_request_context():
                app.preprocess_request()  # type: ignore
                overlay = Overlay.get_by_object(place)
                overlay_id = overlay[list(overlay.keys())[0]].id
            rv = self.app.get(
                url_for(
                    'overlay_update',
                    id_=overlay_id,
                    place_id=place.id,
                    link_id=link_id))
            assert b'42' in rv.data
            rv = self.app.post(
                url_for(
                    'overlay_update',
                    id_=overlay_id,
                    place_id=place.id,
                    link_id=link_id),
                data=data,
                follow_redirects=True)
            assert b'Changes have been saved' in rv.data
            self.app.get(
                url_for('overlay_remove', id_=overlay_id, place_id=place.id),
                follow_redirects=True)

        # Add to place
        rv = self.app.get(url_for('entity_add_file', id_=place.id))
        assert b'Link file' in rv.data
        rv = self.app.post(
            url_for('entity_add_file', id_=place.id),
            data={'checkbox_values': str([file.id])},
            follow_redirects=True)
        assert b'X-Files' in rv.data
        rv = self.app.get(
            url_for('reference_add', id_=reference.id, view='place'))
        assert b'Val-hall' in rv.data
        rv = self.app.get(url_for('entity_add_reference', id_=place.id))
        assert b'Link reference' in rv.data
        rv = self.app.post(
            url_for('entity_add_reference', id_=place.id),
            data={'reference': reference.id, 'page': '777'},
            follow_redirects=True)
        assert b'777' in rv.data

        # Place types
        rv = self.app.get(url_for('node_move_entities', id_=unit_sub1.id))
        assert b'Asgard' in rv.data

        # Test moving entities of a multiple node when a link to the new
        # node already exists
        rv = self.app.post(
            url_for('node_move_entities', id_=unit_sub1.id),
            follow_redirects=True,
            data={
                unit_node.id: unit_sub2.id,
                'selection': location.id,
                'checkbox_values': str([location.id])})
        assert b'Entities were updated' in rv.data

        # Test moving entities of a multiple node
        rv = self.app.post(
            url_for('node_move_entities', id_=unit_sub2.id),
            follow_redirects=True,
            data={
                unit_node.id: unit_sub1.id,
                'selection': location.id,
                'checkbox_values': str([location.id])})
        assert b'Entities were updated' in rv.data

        # Subunits
        data = {
            'name': "Try continue",
            'continue_': 'sub',
            self.precision_geonames: precision,
            self.precision_wikidata: ''}
        self.app.get(url_for('insert', class_='place'))
        rv = self.app.post(
            url_for('insert', class_='place'),
            data=data,
            follow_redirects=True)
        assert b'Insert and add strati' in rv.data
        data['name'] = "It's not a bug, it's a feature!"
        rv = self.app.get(
            url_for(
                'insert',
                class_='stratigraphic_unit',
                origin_id=place.id))
        assert b'Insert and add find' in rv.data
        rv = self.app.post(
            url_for('insert', class_='place', origin_id=place.id),
            data=data)
        feat_id = rv.location.split('/')[-1]
        self.app.get(url_for('insert', class_='place', origin_id=feat_id))
        self.app.get(url_for('update', id_=feat_id))
        self.app.post(url_for('update', id_=feat_id), data=data)
        data['name'] = "I'm a stratigraphic unit"
        rv = self.app.post(
            url_for('insert', class_='place', origin_id=feat_id),
            data=data)
        stratigraphic_id = rv.location.split('/')[-1]
        self.app.get(
            url_for('insert', class_='place', origin_id=stratigraphic_id))
        self.app.get(url_for('update', id_=stratigraphic_id))
        self.app.post(
            url_for('update', id_=stratigraphic_id),
            data={'name': "I'm a stratigraphic unit"})
        dimension_node_id = Node.get_hierarchy('Dimensions').subs[0]
        data = {
            'name': 'You never find me',
            dimension_node_id: 50,
            self.precision_geonames: precision,
            self.precision_wikidata: ''}
        rv = self.app.post(
            url_for('insert', class_='find', origin_id=stratigraphic_id),
            data=data)
        find_id = rv.location.split('/')[-1]
        rv = self.app.post(
            url_for('update', id_=find_id),
            data=data,
            follow_redirects=True)
        assert b'50' in rv.data
        self.app.get(url_for('update', id_=find_id))
        data = {
            'name': 'My human remains',
            self.precision_geonames: precision,
            self.precision_wikidata: ''}
        rv = self.app.post(
            url_for(
                'insert',
                class_='human_remains',
                origin_id=stratigraphic_id),
            data=data)
        human_remains_id = rv.location.split('/')[-1]
        rv = self.app.get(url_for('update', id_=human_remains_id))
        assert b'My human remains' in rv.data
        rv = self.app.get('/')
        assert b'My human remains' in rv.data
        rv = self.app.get(url_for('entity_view', id_=feat_id))
        assert b'not a bug' in rv.data
        rv = self.app.get(url_for('entity_view', id_=stratigraphic_id))
        assert b'a stratigraphic unit' in rv.data
        rv = self.app.get(url_for('entity_view', id_=find_id))
        assert b'You never' in rv.data
        rv = self.app.get(
            url_for('index', view='place', delete_id=place.id),
            follow_redirects=True)
        assert b'not possible if subunits' in rv.data
        rv = self.app.get(
            url_for('index', view='place', delete_id=find_id),
            follow_redirects=True)
        assert b'The entry has been deleted.' in rv.data
        rv = self.app.get(
            url_for('index', view='place', delete_id=place2.id))
        assert b'The entry has been deleted.' in rv.data
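The subunit block above repeats the same pattern several times: post to the insert view without follow_redirects, then read the new entity id from the last segment of the redirect location. A minimal refactoring sketch under the same test-class assumptions (the helper name insert_and_get_id is hypothetical and not part of the suite):

def insert_and_get_id(self, class_: str, origin_id: int, data: dict) -> str:
    # Post the form without following the redirect, as the tests above do,
    # and assume a successful insert redirects to the new entity's view.
    rv = self.app.post(
        url_for('insert', class_=class_, origin_id=origin_id),
        data=data)
    return rv.location.split('/')[-1]  # Last URL segment is the entity id

Used in place of the repeated three-line blocks, e.g. feat_id = self.insert_and_get_id('place', place.id, data), it keeps the test body focused on the assertions.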
def get_all(
        objects: Optional[List[Entity]] = None,
        structure: Optional[Dict[str, Any]] = None) -> Dict[str, List[Any]]:
    if not objects:
        objects = []
    all_: Dict[str, List[Any]] = {
        'point': [],
        'linestring': [],
        'polygon': []}
    extra: Dict[str, List[Any]] = {
        'supers': [],
        'subs': [],
        'siblings': []}
    selected: Dict[str, List[Any]] = {
        'point': [],
        'linestring': [],
        'polygon': [],
        'polygon_point': []}

    # Include GIS of subunits which would be otherwise omitted
    subunit_ids = [subunit.id for subunit in structure['subunits']] \
        if structure else []
    sibling_ids = [sibling.id for sibling in structure['siblings']] \
        if structure else []
    extra_ids = [0]
    if structure:
        extra_ids = [objects[0].id if objects else 0] \
            + [structure['super_id']] \
            + subunit_ids \
            + sibling_ids
    object_ids = [x.id for x in objects] if objects else []
    place_root = Node.get_hierarchy('Place')
    for shape in ['point', 'polygon', 'linestring']:
        for row in Db.get_by_shape(shape, extra_ids):
            description = row['description'].replace('"', '\"') \
                if row['description'] else ''
            object_desc = row['object_desc'].replace('"', '\"') \
                if row['object_desc'] else ''
            item = {
                'type': 'Feature',
                'geometry': json.loads(row['geojson']),
                'properties': {
                    'objectId': row['object_id'],
                    'objectName': row['object_name'].replace('"', '\"'),
                    'objectDescription': object_desc,
                    'id': row['id'],
                    'name': row['name'].replace('"', '\"')
                        if row['name'] else '',
                    'description': description,
                    'shapeType': row['type']}}
            if 'types' in row and row['types']:
                nodes_list = ast.literal_eval('[' + row['types'] + ']')
                for node_id in list(set(nodes_list)):
                    node = g.nodes[node_id]
                    if node.root and node.root[-1] == place_root.id:
                        item['properties']['objectType'] = \
                            node.name.replace('"', '\"')
                        break
            if structure and row['object_id'] == structure['super_id']:
                extra['supers'].append(item)
            elif row['object_id'] in object_ids:
                selected[shape].append(item)
            elif row['object_id'] in subunit_ids:  # pragma: no cover
                extra['subs'].append(item)
            elif row['object_id'] in sibling_ids:  # pragma: no cover
                extra['siblings'].append(item)
            else:
                all_[shape].append(item)
            if 'polygon_point' in row:
                polygon_point_item = dict(item)  # Make a copy
                polygon_point_item['geometry'] = json.loads(
                    row['polygon_point'])
                if row['object_id'] in object_ids:
                    selected['polygon_point'].append(polygon_point_item)
                elif row['object_id'] and structure and \
                        row['object_id'] == structure['super_id']:
                    extra['supers'].append(polygon_point_item)
                elif row['object_id'] in subunit_ids:  # pragma: no cover
                    extra['subs'].append(polygon_point_item)
                elif row['object_id'] in sibling_ids:  # pragma: no cover
                    extra['siblings'].append(polygon_point_item)
                else:
                    all_['point'].append(polygon_point_item)
    return {
        'gisPointAll': json.dumps(all_['point']),
        'gisPointSelected': json.dumps(selected['point']),
        'gisPointSupers': json.dumps(extra['supers']),
        'gisPointSubs': json.dumps(extra['subs']),
        'gisPointSibling': json.dumps(extra['siblings']),
        'gisLineAll': json.dumps(all_['linestring']),
        'gisLineSelected': json.dumps(selected['linestring']),
        'gisPolygonAll': json.dumps(all_['polygon']),
        'gisPolygonSelected': json.dumps(selected['polygon']),
        'gisPolygonPointSelected': json.dumps(selected['polygon_point']),
        'gisAllSelected': json.dumps(
            selected['polygon'] + selected['linestring'] + selected['point'])}
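get_all() returns each GeoJSON layer as an already serialized JSON string keyed by layer name (gisPointAll, gisPolygonSelected, and so on), so a caller can drop the whole dict into the map's template context or re-parse a single layer for inspection. A minimal usage sketch, not taken from the codebase; place and structure stand for whatever the calling view already has at hand:

import json

# Build the layer dict for one selected place and its structure context.
gis_data = get_all(objects=[place], structure=structure)

# Re-parse one layer to look at the features it carries.
selected_points = json.loads(gis_data['gisPointSelected'])
for feature in selected_points:
    print(feature['properties']['objectName'], feature['geometry']['type'])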