class OccurrenceSchema(MA.SQLAlchemyAutoSchema):
    """Auto-schema for TOccurrencesOccurrence (an occtax occurrence)."""

    class Meta:
        model = TOccurrencesOccurrence
        load_instance = True  # deserialize to model instances, not plain dicts
        include_fk = True  # expose foreign-key columns as schema fields

    # Counting rows attached to this occurrence; loaded and dumped.
    cor_counting_occtax = MA.Nested(CountingSchema, many=True)
    # Taxon reference data is read-only from the client's point of view.
    taxref = MA.Nested(TaxrefSchema, dump_only=True)
class OccurrenceSchema(MA.SQLAlchemyAutoSchema):
    """Auto-schema for TOccurrencesOccurrence with mandatory additional fields."""

    class Meta:
        model = TOccurrencesOccurrence
        load_instance = True  # deserialize to model instances, not plain dicts
        include_fk = True  # expose foreign-key columns as schema fields

    # Free-form per-occurrence attributes; clients must always send a value
    # and it may not be null.
    additional_fields = fields.Raw(allow_none=False, required=True)
    # Counting rows attached to this occurrence; loaded and dumped.
    cor_counting_occtax = MA.Nested(CountingSchema, many=True)
    # Taxon reference data is read-only from the client's point of view.
    taxref = MA.Nested(TaxrefSchema, dump_only=True)
    # NOTE(review): presumably consumed by a custom base schema/serializer to
    # strip None values before dump — confirm against where pre_dump_fn is read.
    pre_dump_fn = remove_additional_none_val
class DatasetActorSchema(MA.SQLAlchemyAutoSchema):
    """Schema for CorDatasetActor association rows (dataset <-> actor)."""

    class Meta:
        model = CorDatasetActor
        load_instance = True
        include_fk = True

    # Related entities are serialized for reading only.
    role = MA.Nested(UserSchema, dump_only=True)
    nomenclature_actor_role = MA.Nested(NomenclatureSchema, dump_only=True)
    organism = MA.Nested(OrganismeSchema, dump_only=True)

    @pre_load
    def make_dataset_actor(self, data, **kwargs):
        """Drop an explicit null primary key so loading creates a new row."""
        if "id_cda" in data and data["id_cda"] is None:
            del data["id_cda"]
        return data
class AcquisitionFrameworkActorSchema(MA.SQLAlchemyAutoSchema):
    """Schema for CorAcquisitionFrameworkActor rows (framework <-> actor)."""

    class Meta:
        model = CorAcquisitionFrameworkActor
        load_instance = True
        include_fk = True

    # Related entities are serialized for reading only.
    role = MA.Nested(UserSchema, dump_only=True)
    nomenclature_actor_role = MA.Nested(NomenclatureSchema, dump_only=True)
    organism = MA.Nested(OrganismeSchema, dump_only=True)
    # NOTE(review): nests OrganismeSchema although the name suggests a SINP
    # nomenclature — looks suspicious but preserved as-is; verify against model.
    cor_volets_sinp = MA.Nested(OrganismeSchema, dump_only=True)

    @pre_load
    def make_af_actor(self, data, **kwargs):
        """Drop an explicit null primary key so loading creates a new row."""
        if "id_cafa" in data and data["id_cafa"] is None:
            del data["id_cafa"]
        return data
class AcquisitionFrameworkSchema(MetadataSchema):
    """Schema for TAcquisitionFramework with datasets, actors and nomenclatures."""

    class Meta:
        model = TAcquisitionFramework
        load_instance = True
        include_fk = True

    # Audit timestamps are database-managed and never accepted from clients.
    meta_create_date = fields.DateTime(dump_only=True)
    meta_update_date = fields.DateTime(dump_only=True)
    # Child datasets; the back-reference and several nomenclatures are excluded
    # to break recursion and keep payloads small.
    t_datasets = MA.Nested(
        DatasetSchema(
            exclude=(
                "acquisition_framework",
                "modules",
                "nomenclature_dataset_objectif",
                "nomenclature_collecting_method",
                "nomenclature_data_origin",
                "nomenclature_source_status",
                "nomenclature_resource_type",
            ),
            many=True,
        ),
        many=True
    )
    # Bibliographic references; the reciprocal field is excluded to break the cycle.
    bibliographical_references = MA.Nested(
        BibliographicReferenceSchema(
            exclude=(
                "acquisition_framework",
            ),
            many=True,
        ),
        many=True,
    )
    cor_af_actor = MA.Nested(
        AcquisitionFrameworkActorSchema,
        many=True
    )
    cor_volets_sinp = MA.Nested(
        NomenclatureSchema,
        many=True,
    )
    cor_objectifs = MA.Nested(
        NomenclatureSchema,
        many=True
    )
    cor_territories = MA.Nested(
        NomenclatureSchema,
        many=True
    )
    # Single-valued nomenclatures and the creator are read-only.
    nomenclature_territorial_level = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_financing_type = MA.Nested(NomenclatureSchema, dump_only=True)
    creator = MA.Nested(UserSchema, dump_only=True)
class CountingSchema(MA.SQLAlchemyAutoSchema):
    """Schema for CorCountingOccurrence counting rows with attached medias."""

    class Meta:
        model = CorCountingOccurrence
        load_instance = True

    medias = MA.Nested(MediaSchema, many=True)

    @pre_load
    def make_counting(self, data, **kwargs):
        """Drop an explicit null primary key so loading creates a new row."""
        if "id_counting_occtax" in data and data["id_counting_occtax"] is None:
            del data["id_counting_occtax"]
        return data
class ReleveSchema(MA.SQLAlchemyAutoSchema):
    """Schema for TRelevesOccurrence: an occtax survey with its occurrences."""

    class Meta:
        model = TRelevesOccurrence
        load_instance = True
        include_fk = True
        exclude = ("geom_local", )

    # Dates/hours use fixed wire formats; hours may be omitted.
    date_min = fields.Date(format="%Y-%m-%d")
    date_max = fields.Date(format="%Y-%m-%d")
    hour_min = fields.Time(format="%H:%M", allow_none=True)
    hour_max = fields.Time(format="%H:%M", allow_none=True)
    geom_4326 = GeojsonSerializationField()
    # The digitiser id is set server-side, never accepted from clients.
    id_digitiser = MA.auto_field(dump_only=True)
    t_occurrences_occtax = MA.Nested(OccurrenceSchema, many=True)
    # NOTE(review): evaluated at class-definition time, so an app context must
    # be active when this module is imported — confirm.
    observers = MA.Nested(
        ObserverSchema,
        many=True,
        allow_none=current_app.config.get("OCCTAX", {}).get("observers_txt", True),
    )
    digitiser = MA.Nested(ObserverSchema, dump_only=True)
    dataset = MA.Nested(DatasetSchema, dump_only=True)
    habitat = MA.Nested(HabrefSchema, dump_only=True)

    @pre_load
    def make_releve(self, data, **kwargs):
        """Normalise payloads: default observers to a list, drop a null PK."""
        if data.get("observers") is None:
            data["observers"] = []
        if "id_releve_occtax" in data and data["id_releve_occtax"] is None:
            del data["id_releve_occtax"]
        return data
class BibliographicReferenceSchema(MetadataSchema):
    """Schema for TBibliographicReference rows of an acquisition framework."""

    class Meta:
        model = TBibliographicReference
        load_instance = True
        include_fk = True

    # Back-reference; the reciprocal field is excluded to break the cycle.
    acquisition_framework = MA.Nested(
        "AcquisitionFrameworkSchema",
        exclude=("bibliographical_references",),
        dump_only=True,
    )

    @pre_load
    def make_biblio_ref(self, data, **kwargs):
        """Drop an explicit null primary key so loading creates a new row.

        Fix: removed a stray debug ``print(data)`` left over from development,
        which was dumping every incoming payload to stdout.
        """
        if data.get("id_bibliographic_reference") is None:
            data.pop("id_bibliographic_reference", None)
        return data
class BibliographicReferenceSchema(CruvedSchemaMixin, SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema):
    """Schema for TBibliographicReference with CRUVED and smart relationships."""

    class Meta:
        model = TBibliographicReference
        load_instance = True
        include_fk = True

    # Parent framework is read-only; resolved lazily by name to avoid a cycle.
    acquisition_framework = MA.Nested("AcquisitionFrameworkSchema", dump_only=True)

    @pre_load
    def make_biblio_ref(self, data, **kwargs):
        """Drop an explicit null primary key so loading creates a new row."""
        if "id_bibliographic_reference" in data and data["id_bibliographic_reference"] is None:
            del data["id_bibliographic_reference"]
        return data
class AcquisitionFrameworkSchema(CruvedSchemaMixin, SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema):
    """Schema for TAcquisitionFramework with CRUVED and smart relationships."""

    class Meta:
        model = TAcquisitionFramework
        load_instance = True
        include_fk = True

    # Audit timestamps are database-managed and never accepted from clients.
    meta_create_date = fields.DateTime(dump_only=True)
    meta_update_date = fields.DateTime(dump_only=True)
    t_datasets = MA.Nested(DatasetSchema, many=True)
    bibliographical_references = MA.Nested(BibliographicReferenceSchema, many=True)
    # unknown=EXCLUDE: silently ignore extra keys in nested payloads.
    cor_af_actor = MA.Nested(AcquisitionFrameworkActorSchema, many=True, unknown=EXCLUDE)
    cor_volets_sinp = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE)
    cor_objectifs = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE)
    cor_territories = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE)
    # Single-valued nomenclatures and the creator are read-only.
    nomenclature_territorial_level = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_financing_type = MA.Nested(NomenclatureSchema, dump_only=True)
    creator = MA.Nested(UserSchema, dump_only=True)
def get_app(config, _app=None, with_external_mods=True, with_flask_admin=True):
    """Build and configure the GeoNature Flask application.

    Wires the DB, Marshmallow, mail handler, all core blueprints and, when
    enabled, Flask-Admin and third-party GeoNature modules.
    """
    # Make sure app is a singleton
    # NOTE(review): rebinding the local parameter `_app` at the end of this
    # function does NOT persist between calls — the singleton guard only works
    # if callers pass the previous app back in; confirm intent.
    if _app is not None:
        return _app

    app = Flask(__name__)
    app.config.update(config)

    # Bind app to DB
    DB.init_app(app)

    # For deleting files on "delete" media
    @before_models_committed.connect_via(app)
    def on_before_models_committed(sender, changes):
        # Give models a hook to clean up resources (e.g. media files on disk)
        # just before their deletion is committed.
        for obj, change in changes:
            if change == "delete" and hasattr(obj, "__before_commit_delete__"):
                obj.__before_commit_delete__()

    # Bind app to MA
    MA.init_app(app)

    # Pass parameters to the usershub authenfication sub-module, DONT CHANGE THIS
    app.config["DB"] = DB
    # Pass parameters to the submodules
    app.config["MA"] = MA
    # Pass the ID_APP to the submodule to avoid token conflict between app on the same server
    app.config["ID_APP"] = app.config["ID_APPLICATION_GEONATURE"]

    with app.app_context():
        if app.config["MAIL_ON_ERROR"] and app.config["MAIL_CONFIG"]:
            from geonature.utils.logs import mail_handler

            logging.getLogger().addHandler(mail_handler)
        # DB.create_all()

        if with_flask_admin:
            # Importing registers the admin views as a side effect.
            # from geonature.core.admin import flask_admin
            from geonature.core.admin.admin import flask_admin

        # Register core blueprints under their API prefixes.
        from pypnusershub.routes import routes

        app.register_blueprint(routes, url_prefix="/auth")

        from pypn_habref_api.routes import routes

        app.register_blueprint(routes, url_prefix="/habref")

        from pypnusershub import routes_register

        app.register_blueprint(routes_register.bp, url_prefix="/pypn/register")

        from pypnnomenclature.routes import routes

        app.register_blueprint(routes, url_prefix="/nomenclatures")

        from geonature.core.gn_permissions.routes import routes

        app.register_blueprint(routes, url_prefix="/permissions")

        from geonature.core.gn_permissions.backoffice.views import routes

        app.register_blueprint(routes, url_prefix="/permissions_backoffice")

        from geonature.core.routes import routes

        app.register_blueprint(routes, url_prefix="")

        from geonature.core.users.routes import routes

        app.register_blueprint(routes, url_prefix="/users")

        from geonature.core.gn_synthese.routes import routes

        app.register_blueprint(routes, url_prefix="/synthese")

        from geonature.core.gn_meta.routes import routes

        app.register_blueprint(routes, url_prefix="/meta")

        from geonature.core.ref_geo.routes import routes

        app.register_blueprint(routes, url_prefix="/geo")

        from geonature.core.gn_exports.routes import routes

        app.register_blueprint(routes, url_prefix="/exports")

        from geonature.core.auth.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_auth")

        from geonature.core.gn_monitoring.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_monitoring")

        from geonature.core.gn_commons.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_commons")

        # Errors
        # Imported for its side effect of registering error handlers.
        from geonature.core.errors import routes

        app.wsgi_app = ReverseProxied(app.wsgi_app, script_name=config["API_ENDPOINT"])

        CORS(app, supports_credentials=True)

        # Emails configuration
        if app.config["MAIL_CONFIG"]:
            conf = app.config.copy()
            conf.update(app.config["MAIL_CONFIG"])
            app.config = conf
            MAIL.init_app(app)

        # Loading third-party modules
        if with_external_mods:
            for conf, manifest, module in list_and_import_gn_modules(app):
                app.register_blueprint(
                    module.backend.blueprint.blueprint, url_prefix=conf["MODULE_URL"]
                )
        # NOTE(review): dead assignment — rebinding the local `_app` has no
        # effect outside this call (see the guard at the top).
        _app = app
    return app
# NOTE(review): this fragment appears to be the interior of a marshmallow
# schema class for TValidations whose `class ...Schema(...)` header is outside
# the visible chunk — confirm against the full file.
class Meta:
    model = TValidations
    load_instance = True  # deserialize to model instances, not plain dicts
    include_fk = True  # expose foreign-key columns as schema fields

# Validation nomenclature and validator are serialized for reading only.
validation_label = fields.Nested(NomenclatureSchema, dump_only=True)
validator_role = MA.Nested(UserSchema, dump_only=True)
class DatasetSchema(MetadataSchema):
    """Schema for TDatasets with actors, modules and nomenclatures."""

    class Meta:
        model = TDatasets
        load_instance = True
        include_fk = True

    # Audit timestamps are database-managed and never accepted from clients.
    meta_create_date = fields.DateTime(dump_only=True)
    meta_update_date = fields.DateTime(dump_only=True)
    cor_dataset_actor = MA.Nested(
        DatasetActorSchema,
        many=True
    )
    modules = MA.Nested(
        ModuleSchema,
        many=True,
        exclude=("meta_create_date", "meta_update_date")
    )
    # Creator and the single-valued nomenclatures are read-only.
    creator = MA.Nested(UserSchema, dump_only=True)
    nomenclature_data_type = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_dataset_objectif = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_collecting_method = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_data_origin = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_source_status = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_resource_type = MA.Nested(NomenclatureSchema, dump_only=True)
    cor_territories = MA.Nested(
        NomenclatureSchema,
        many=True
    )
    # Parent framework; its dataset list is excluded to break the cycle.
    acquisition_framework = MA.Nested("AcquisitionFrameworkSchema", exclude=("t_datasets",), dump_only=True)
class DatasetSchema(CruvedSchemaMixin, SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema):
    """Schema for TDatasets with CRUVED and smart relationships."""

    class Meta:
        model = TDatasets
        load_instance = True
        include_fk = True

    # Audit timestamps are database-managed and never accepted from clients.
    meta_create_date = fields.DateTime(dump_only=True)
    meta_update_date = fields.DateTime(dump_only=True)
    # unknown=EXCLUDE: silently ignore extra keys in nested payloads.
    cor_dataset_actor = MA.Nested(DatasetActorSchema, many=True, unknown=EXCLUDE)
    modules = MA.Nested(ModuleSchema, many=True, exclude=("meta_create_date", "meta_update_date"), unknown=EXCLUDE)
    # Creator and the single-valued nomenclatures are read-only.
    creator = MA.Nested(UserSchema, dump_only=True)
    nomenclature_data_type = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_dataset_objectif = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_collecting_method = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_data_origin = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_source_status = MA.Nested(NomenclatureSchema, dump_only=True)
    nomenclature_resource_type = MA.Nested(NomenclatureSchema, dump_only=True)
    cor_territories = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE)
    acquisition_framework = MA.Nested("AcquisitionFrameworkSchema", dump_only=True)
def create_app(with_external_mods=True):
    """Application factory for the GeoNature API.

    Configures URL scheme/root from API_ENDPOINT, logging, DB + migrations,
    Marshmallow, mail, request user loading, admin, all core blueprints and,
    when enabled, third-party GeoNature modules.
    """
    app = Flask(__name__.split('.')[0], static_folder="../static")

    app.config.update(config)
    # Derive application root and preferred scheme from the configured endpoint.
    api_uri = urlsplit(app.config['API_ENDPOINT'])
    app.config['APPLICATION_ROOT'] = api_uri.path
    app.config['PREFERRED_URL_SCHEME'] = api_uri.scheme
    if 'SCRIPT_NAME' not in os.environ:
        os.environ['SCRIPT_NAME'] = app.config['APPLICATION_ROOT'].rstrip('/')
    app.config['TEMPLATES_AUTO_RELOAD'] = True
    # disable cache for downloaded files (PDF file stat for ex)
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    if len(app.config['SECRET_KEY']) < 20:
        raise Exception("The SECRET_KEY config option must have a length "
                        "greater or equals to 20 characters.")

    # set from headers HTTP_HOST, SERVER_NAME, and SERVER_PORT
    app.wsgi_app = ProxyFix(app.wsgi_app, x_host=1)

    app.json_encoder = MyJSONEncoder

    # set logging config
    config_loggers(app.config)

    db.init_app(app)
    migrate.init_app(app, DB, directory=BACKEND_DIR / 'geonature' / 'migrations')
    MA.init_app(app)
    CORS(app, supports_credentials=True)

    # Emails configuration
    if app.config["MAIL_CONFIG"]:
        conf = app.config.copy()
        conf.update(app.config["MAIL_CONFIG"])
        app.config = conf
        MAIL.init_app(app)

    # Pass parameters to the usershub authenfication sub-module, DONT CHANGE THIS
    app.config["DB"] = DB
    # Pass parameters to the submodules
    app.config["MA"] = MA

    # For deleting files on "delete" media
    @before_models_committed.connect_via(app)
    def on_before_models_committed(sender, changes):
        # Give models a hook to clean up resources (e.g. media files on disk)
        # just before their deletion is committed.
        for obj, change in changes:
            if change == "delete" and hasattr(obj, "__before_commit_delete__"):
                obj.__before_commit_delete__()

    # setting g.current_user on each request
    @app.before_request
    def load_current_user():
        try:
            g.current_user = user_from_token(request.cookies['token']).role
        except (KeyError, UnreadableAccessRightsError, AccessRightsExpiredError):
            # Missing, unreadable or expired token → anonymous request.
            g.current_user = None

    admin.init_app(app)

    # Pass the ID_APP to the submodule to avoid token conflict between app on the same server
    with app.app_context():
        try:
            gn_app = Application.query.filter_by(
                code_application=config['CODE_APPLICATION']).one()
        except (ProgrammingError, NoResultFound):
            # Tolerate a not-yet-initialized database (e.g. during install).
            logging.warning(
                "Warning: unable to find GeoNature application, database not yet initialized?"
            )
        else:
            app.config["ID_APP"] = app.config[
                "ID_APPLICATION_GEONATURE"] = gn_app.id_application

    # Register core blueprints, resolved dynamically from "module:attr" paths.
    for blueprint_path, url_prefix in [
        ('pypnusershub.routes:routes', '/auth'),
        ('pypn_habref_api.routes:routes', '/habref'),
        ('pypnusershub.routes_register:bp', '/pypn/register'),
        ('pypnnomenclature.routes:routes', '/nomenclatures'),
        ('geonature.core.gn_commons.routes:routes', '/gn_commons'),
        ('geonature.core.gn_permissions.routes:routes', '/permissions'),
        ('geonature.core.gn_permissions.backoffice.views:routes', '/permissions_backoffice'),
        ('geonature.core.routes:routes', '/'),
        ('geonature.core.users.routes:routes', '/users'),
        ('geonature.core.gn_synthese.routes:routes', '/synthese'),
        ('geonature.core.gn_meta.routes:routes', '/meta'),
        ('geonature.core.ref_geo.routes:routes', '/geo'),
        ('geonature.core.auth.routes:routes', '/gn_auth'),
        ('geonature.core.gn_monitoring.routes:routes', '/gn_monitoring'),
        ('geonature.core.gn_profiles.routes:routes', '/gn_profiles'),
    ]:
        module_name, blueprint_name = blueprint_path.split(':')
        blueprint = getattr(import_module(module_name), blueprint_name)
        app.register_blueprint(blueprint, url_prefix=url_prefix)

    with app.app_context():
        # register errors handlers
        # Imported for its side effect of registering handlers.
        import geonature.core.errors

        # Loading third-party modules
        if with_external_mods:
            try:
                for module_object, module_config, module_blueprint in import_backend_enabled_modules():
                    app.config[module_config['MODULE_CODE']] = module_config
                    app.register_blueprint(
                        module_blueprint, url_prefix=module_config['MODULE_URL'])
            except ProgrammingError as sqla_error:
                # Module tables may not exist yet on a fresh install.
                if isinstance(sqla_error.orig, UndefinedTable):
                    logging.warning(
                        "Warning: database not yet initialized, skipping loading of external modules"
                    )
                else:
                    raise

    return app
def create_app(with_external_mods=True, with_flask_admin=True):
    """Application factory for the GeoNature API (blueprint-import variant).

    Wires DB, mail, Marshmallow, optional Flask-Admin, all core blueprints
    and, when enabled, third-party GeoNature modules.
    """
    app = Flask(__name__)
    app.config.update(config)

    # Bind app to DB
    DB.init_app(app)
    MAIL.init_app(app)

    # For deleting files on "delete" media
    @before_models_committed.connect_via(app)
    def on_before_models_committed(sender, changes):
        # Give models a hook to clean up resources (e.g. media files on disk)
        # just before their deletion is committed.
        for obj, change in changes:
            if change == "delete" and hasattr(obj, "__before_commit_delete__"):
                obj.__before_commit_delete__()

    # Bind app to MA
    MA.init_app(app)

    # Pass parameters to the usershub authenfication sub-module, DONT CHANGE THIS
    app.config["DB"] = DB
    # Pass parameters to the submodules
    app.config["MA"] = MA
    # Pass the ID_APP to the submodule to avoid token conflict between app on the same server
    app.config["ID_APP"] = app.config["ID_APPLICATION_GEONATURE"]

    if with_flask_admin:
        from geonature.core.admin.admin import admin

        admin.init_app(app)

    with app.app_context():
        if app.config["MAIL_ON_ERROR"] and app.config["MAIL_CONFIG"]:
            from geonature.utils.logs import mail_handler

            logging.getLogger().addHandler(mail_handler)

        # Register core blueprints under their API prefixes.
        from pypnusershub.routes import routes

        app.register_blueprint(routes, url_prefix="/auth")

        from pypn_habref_api.routes import routes

        app.register_blueprint(routes, url_prefix="/habref")

        from pypnusershub import routes_register

        app.register_blueprint(routes_register.bp, url_prefix="/pypn/register")

        from pypnnomenclature.routes import routes

        app.register_blueprint(routes, url_prefix="/nomenclatures")

        from geonature.core.gn_permissions.routes import routes

        app.register_blueprint(routes, url_prefix="/permissions")

        from geonature.core.gn_permissions.backoffice.views import routes

        app.register_blueprint(routes, url_prefix="/permissions_backoffice")

        from geonature.core.routes import routes

        app.register_blueprint(routes, url_prefix="")

        from geonature.core.users.routes import routes

        app.register_blueprint(routes, url_prefix="/users")

        from geonature.core.gn_synthese.routes import routes

        app.register_blueprint(routes, url_prefix="/synthese")

        from geonature.core.gn_meta.routes import routes

        app.register_blueprint(routes, url_prefix="/meta")

        from geonature.core.ref_geo.routes import routes

        app.register_blueprint(routes, url_prefix="/geo")

        from geonature.core.gn_exports.routes import routes

        app.register_blueprint(routes, url_prefix="/exports")

        from geonature.core.auth.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_auth")

        from geonature.core.gn_monitoring.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_monitoring")

        from geonature.core.gn_commons.routes import routes

        app.register_blueprint(routes, url_prefix="/gn_commons")

        # Errors
        # Imported for its side effect of registering error handlers.
        from geonature.core.errors import routes

        CORS(app, supports_credentials=True)

        # Emails configuration
        if app.config["MAIL_CONFIG"]:
            conf = app.config.copy()
            conf.update(app.config["MAIL_CONFIG"])
            app.config = conf
            MAIL.init_app(app)

        app.config['TEMPLATES_AUTO_RELOAD'] = True
        # disable cache for downloaded files (PDF file stat for ex)
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

        # Loading third-party modules
        if with_external_mods:
            for module, blueprint in import_backend_enabled_modules():
                app.config[blueprint.config['MODULE_CODE']] = blueprint.config
                app.register_blueprint(
                    blueprint, url_prefix=blueprint.config['MODULE_URL'])
        # NOTE(review): dead assignment — `_app` is a local never read again;
        # it does not persist any singleton state.
        _app = app
    return app