def emit(self, record):
    """Persist a log record to the database.

    Args:
        record (logging.LogRecord): The record to store as a ``Log`` row.
    """
    session = Session()
    try:
        trace = None
        exc = record.__dict__["exc_info"]
        if exc:
            # Format the exception attached to *this* record.
            # traceback.format_exc() would format whatever exception is
            # currently being handled, which is not necessarily the one
            # carried by record.exc_info.
            trace = "".join(traceback.format_exception(*exc))
        log = Log(
            logger=record.__dict__["name"],
            level=record.__dict__["levelname"],
            trace=trace,
            msg=record.__dict__["msg"],
        )
        session.add(log)
        session.commit()
    finally:
        # Release the session even if the commit fails.
        session.close()
def push_descriptors():
    """Push descriptor JSON for every schema file into its checksum row.

    Iterates all descriptors found under ``SCHEMA_DIR`` and stores each
    descriptor on the matching ``Checksum`` row. Errors on one descriptor
    are logged and do not stop the remaining descriptors.
    """
    session = Session()
    descriptors = DescriptorsLoader([SCHEMA_DIR], [])
    try:
        for desc in descriptors.iter_descriptors():
            try:
                row = (
                    session.query(Checksum)
                    .filter(Checksum.data_resource == desc.table_name)
                    .first()
                )
                if row is None:
                    # No checksum row exists for this resource yet; skip it
                    # explicitly instead of raising AttributeError below.
                    logger.warning(
                        "No checksum row found for %s", desc.table_name
                    )
                    continue
                row.descriptor_json = desc.descriptor
                session.add(row)
                session.commit()
            except Exception:
                logger.exception("Error pushing descriptors")
                continue
    finally:
        # Close once after all descriptors are processed. The original
        # closed the session inside the loop's finally, releasing it after
        # every single iteration.
        session.close()
def add_model_checksum(
    self, table_name: str, model_checksum: str = "0", descriptor_json: dict = None
):
    """Adds a new checksum for a data model.

    Args:
        table_name (str): Name of the table to add the checksum.
        model_checksum (str): Checksum value.
        descriptor_json (dict): Table Schema descriptor stored alongside
            the checksum. Defaults to an empty dict.
    """
    # Avoid the mutable default argument ({}) of the original signature,
    # which would be shared across every call; bind a fresh dict instead.
    if descriptor_json is None:
        descriptor_json = {}
    session = Session()
    try:
        checksum = Checksum()
        checksum.data_resource = table_name
        checksum.model_checksum = model_checksum
        checksum.descriptor_json = descriptor_json
        session.add(checksum)
        session.commit()
    except Exception:
        logger.exception("Error adding checksum")
    finally:
        session.close()
def save_migration(file_name: str, file_blob) -> None:
    """Save an alembic migration file to the database.

    Called by alembic as a post write hook. The migration is inserted only
    if no row with the same ``file_name`` already exists; failures are
    logged and swallowed so the hook never breaks alembic itself.

    Args:
        file_name (str): Name of the migration file.
        file_blob: Raw contents of the migration file.
    """
    logger.info("Trying to save migration files to DB...")
    session = Session()
    try:
        # Check for an existing row first so the ORM object is only built
        # when it will actually be inserted.
        existing = (
            session.query(Migrations)
            .filter(Migrations.file_name == file_name)
            .count()
        )
        if existing == 0:
            new_migration = Migrations()
            new_migration.file_name = file_name
            new_migration.file_blob = file_blob
            session.add(new_migration)
            session.commit()
    except Exception:
        logger.exception("Failed to save migration files to DB.")
    finally:
        session.close()
def insert_one(self, data_model, data_resource_name, table_schema, request_obj):
    """Insert a new object.

    Args:
        data_model (object): SQLAlchemy ORM model.
        data_resource_name (str): Name of the data resource.
        table_schema (dict): The Table Schema object to use for validation.
        request_obj (dict): HTTP request object.

    Return:
        dict, int: The response object and associated HTTP status code.

    Raises:
        ApiError: If the request body is missing or invalid.
        SchemaValidationFailure: If the Table Schema itself fails validation.
        ApiUnhandledError: If the database insert fails.
    """
    try:
        request_obj = request_obj.json
    except Exception:
        raise ApiError("No request body found.", 400)

    _ = Schema(table_schema)
    errors = []
    accepted_fields = []

    if not validate(table_schema):
        raise SchemaValidationFailure()

    # Check for required fields
    for field in table_schema["fields"]:
        accepted_fields.append(field["name"])
        # .get tolerates schemas that omit the optional "required" flag
        # (the original indexed field["required"] and raised KeyError).
        if field.get("required", False) and field["name"] not in request_obj.keys():
            errors.append(f"Required field '{field['name']}' is missing.")

    valid_fields = []
    many_query = []
    for field in request_obj.keys():
        if field in accepted_fields:
            valid_fields.append(field)
        else:
            # Fields not in the schema may map to a junction (many-to-many)
            # table; anything else is an unknown field.
            junc_table = JuncHolder.lookup_table(field, data_resource_name)
            if junc_table is not None:
                values = request_obj[field]
                if not isinstance(values, list):
                    values = [values]
                many_query.append([field, values, junc_table])
            else:
                errors.append(f"Unknown field '{field}' found.")

    if len(errors) > 0:
        raise ApiError("Invalid request body.", 400, errors)

    # Create the session before the try block: the original created it
    # inside, so a failing Session() would make the finally clause raise
    # UnboundLocalError on session.close().
    session = Session()
    try:
        new_object = data_model()
        for field in valid_fields:
            setattr(new_object, field, request_obj[field])

        session.add(new_object)
        session.commit()

        id_value = getattr(new_object, table_schema["primaryKey"])

        # process the many_query
        for field, values, table in many_query:
            self.process_many_query(
                session, table, id_value, field, data_resource_name, values
            )

        return {
            "message": "Successfully added new resource.",
            "id": id_value,
        }, 201
    except Exception:
        # Log the underlying error before wrapping it so the root cause is
        # not silently discarded.
        logger.exception("Failed to create new resource.")
        raise ApiUnhandledError("Failed to create new resource.", 400)
    finally:
        session.close()