def before_request():
    """
    Establish per-request logging context.

    Records the wall-clock start time and a fresh request id on flask.g,
    and logs the correlation id supplied by the caller (generating one
    when the x-linz-correlation-id header is absent).
    """
    g.start_time = time.time()
    g.request_id = str(ulid.new())
    correlation_id = request.headers.get("x-linz-correlation-id", str(ulid.new()))
    get_log().info("CorrelationId", correlationId=correlation_id)
def handle_data_error(e: DataError):
    """
    Render an application DataError as a JSON error response.

    :param e: the raised DataError carrying message and HTTP status
    :returns: tuple (flask.wrappers.Response, int)
    """
    get_log().info("ResponseMessage", message=e.msg, httpCode=e.http_code)
    return jsonify({"message": e.msg}), e.http_code
def handle_http_error(e: HTTPException):
    """
    Render a werkzeug HTTPException as a JSON error response.

    :param e: the raised HTTPException
    :returns: tuple (flask.wrappers.Response, int)
    """
    get_log().error("Exception", exception=e)
    return jsonify({"message": e.description}), e.code
def handle_error(e: Exception):
    """
    Catch-all handler: any uncaught exception becomes a generic 500.

    :param e: the raised exception
    :returns: tuple (flask.wrappers.Response, int)
    """
    get_log().error("Exception", exception=e)
    return jsonify({"message": "Internal Server Error"}), 500
def validate_qgis_version(qgis_version):
    """
    Ensure the query parameter is a valid version string

    :param qgis_version: qgis version to filter by
    :type qgis_version: string
    :raises DataError: 400 when the string is not a valid version
    """
    try:
        StrictVersion(qgis_version)
    except ValueError as err:
        get_log().error("Invalid QGIS version")
        # Chain the original ValueError so the root cause is preserved
        # in tracebacks instead of being masked by the DataError.
        raise DataError(400, "Invalid QGIS version") from err
def validate_stage(plugin_stage):
    """
    Validate the ``stage`` query parameter.

    As query params other than "?qgis=x" can not be sent via QGIS, only
    the dev and prd stages can currently be stored. dev is opt-in via
    ``?stage=dev``; when the ``stage`` param is not supplied the API
    interaction is considered to target prd plugins.

    :param plugin_stage: requested stage ("dev" or "" for prd)
    :raises DataError: 400 when the stage is not recognised
    :returns: the validated stage value
    """
    if plugin_stage in ("dev", ""):
        return plugin_stage
    get_log().error("stage is not recognised")
    raise DataError(400, "stage is not recognised")
def get_access_token(headers):
    """
    Parse the bearer token out of the request headers.

    :param headers: request headers expected to contain "authorization"
    :raises DataError: 403 when the header is missing or lacks the bearer prefix
    :returns: the token portion of the authorization header
    """
    bearer = headers.get("authorization", None)
    if not bearer:
        get_log().error("InvalidToken")
        raise DataError(403, "Invalid token")
    # Prefix check is case-insensitive, but the returned token keeps
    # the caller's original casing.
    if not bearer.lower().startswith(AUTH_PREFIX):
        get_log().error("InvalidToken", authHeader=bearer)
        raise DataError(403, "Invalid token")
    return bearer[len(AUTH_PREFIX):]
def health():
    """
    Ping to confirm the service is up.

    Exercises the database and s3 connections; each call raises on
    failure, which the error handlers report as a non-200 response, so
    reaching the end of this function means every check passed.
    """
    # Database connectivity check
    MetadataModel.all_version_zeros(DEFUALT_STAGE)
    # s3 connectivity check
    aws.s3_head_bucket(repo_bucket_name)
    checks = {
        "db": {"status": "ok"},
        "s3": {"status": "ok"},
    }
    get_log().info({"status": "ok", "details": checks})
    return format_response({"status": "ok"}, 200)
def upload(plugin_id):
    """
    End point for processing QGIS plugin data POSTed by the user

    If the query param "?stage" is not supplied, plugins POSTed are
    considered production. If `?stage=dev` is used the plugin is stored
    as a dev version.

    :param plugin_id: plugin root folder, must match the zipfile root dir
    :type plugin_id: str
    :raises DataError: 400 on missing/invalid plugin data, 403 on bad token
    :returns: tuple (http response, http code)
    :rtype: tuple (flask.wrappers.Response, int)
    """
    plugin_stage = request.args.get("stage", DEFUALT_STAGE)
    validate_stage(plugin_stage)
    post_data = request.get_data()
    if not post_data:
        get_log().error("NoDataSupplied")
        raise DataError(400, "No plugin file supplied")

    # Get users access token from header and confirm it matches this plugin
    token = get_access_token(request.headers)
    MetadataModel.validate_token(token, plugin_id, plugin_stage)

    # Test the file is a zipfile
    if not zipfile.is_zipfile(BytesIO(post_data)):
        get_log().error("NotZipfile")
        raise DataError(400, "Plugin file supplied not a Zipfile")

    # Extract plugin metadata
    plugin_zipfile = zipfile.ZipFile(BytesIO(post_data), "r", zipfile.ZIP_DEFLATED, False)
    metadata_path = plugin_parser.metadata_path(plugin_zipfile)
    metadata = plugin_parser.metadata_contents(plugin_zipfile, metadata_path)

    # Get the plugins root dir. This is what QGIS references when handling plugins
    g.plugin_id = plugin_parser.zipfile_root_dir(plugin_zipfile)
    if g.plugin_id != plugin_id:
        raise DataError(400, f"Invalid plugin name {g.plugin_id}")

    # Allocate a filename
    filename = str(uuid.uuid4())
    get_log().info("FileName", filename=filename)

    # Upload the plugin to s3
    aws.s3_put(post_data, repo_bucket_name, filename, g.plugin_id)
    get_log().info("UploadedTos3", filename=filename, bucketName=repo_bucket_name)

    # Update metadata database
    try:
        plugin_metadata = MetadataModel.new_plugin_version(metadata, g.plugin_id, filename, plugin_stage)
    except ValueError as error:
        # Chain the cause so metadata validation failures keep their traceback
        raise DataError(400, str(error)) from error

    return format_response(plugin_metadata, 201)
def validate_token(cls, token, plugin_id, plugin_stage):
    """
    Check the bearer-token against the plugins secret to ensure
    the user can modify the plugin.

    :param token: bearer-token as submitted by user. Ensures user can modify plugin record
    :type token: str
    :param plugin_id: plugin root folder. Makes up the PK
    :type plugin_id: str
    :param plugin_stage: plugin stage the record is stored under
    :raises DataError: 400 when the plugin is unknown, 403 on token mismatch
    """
    items = cls.get_plugin_item(plugin_id, plugin_stage, "metadata")
    try:
        metadata = next(items)
    except StopIteration:
        get_log().error("PluginNotFound")
        raise DataError(400, "Plugin Not Found")
    # Compare hashes only — the stored secret is already hashed
    if hash_token(token) != metadata.secret:
        get_log().error("InvalidToken")
        raise DataError(403, "Invalid token")
def after_request(response):
    """
    After each request, log API/Lambda metrics and tag the response
    with the request id allocated in before_request.
    """
    env = os.environ
    elapsed_ms = (time.time() - g.start_time) * 1000
    get_log().info(
        "RequestMetadata",
        uri=request.path,
        method=request.method,
        status=response.status,
        args=dict(request.args),
        duration=elapsed_ms,
        lambdaName=env["AWS_LAMBDA_FUNCTION_NAME"],
        lambdaMemory=env["AWS_LAMBDA_FUNCTION_MEMORY_SIZE"],
        lambdaVersion=env["AWS_LAMBDA_FUNCTION_VERSION"],
        lambdaLogStreamName=env["AWS_LAMBDA_LOG_STREAM_NAME"],
        lambdaRegion=env["AWS_REGION"],
    )
    response.headers["X-Request-ID"] = g.request_id
    return response
def archive_plugin(cls, plugin_id, plugin_stage):
    """
    Retire plugin by adding an end date to the metadata record

    :param plugin_id: plugin Id. Makes up the PK
    :type plugin_id: str
    :param plugin_stage: plugin stage the record is stored under
    :raises DataError: 400 when the plugin is unknown
    :returns: json describing archived plugin metadata
    :rtype: json
    """
    result = cls.get_plugin_item(plugin_id, plugin_stage)
    try:
        version_zero = next(result)
    except StopIteration:
        get_log().error("PluginNotFound")
        raise DataError(400, "Plugin Not Found")

    # Capture a single timestamp so ended_at and updated_at are
    # identical rather than microseconds apart.
    now = datetime.now()
    version_zero.update(actions=[
        cls.ended_at.set(now),
        cls.updated_at.set(now),
        cls.revisions.set(version_zero.revisions + 1),
    ])

    # Insert former v0 into revision for audit purposes
    cls.insert_revision(version_zero.attribute_values)
    get_log().info("RevisionInserted", pluginId=plugin_id, revision=version_zero.revisions)

    # Round-trip through the model encoder to get plain JSON-safe values
    updated_metadata = json.loads(
        json.dumps(version_zero.attribute_values, cls=ModelEncoder))
    get_log().info("MetadataStored", metadata=updated_metadata)
    return updated_metadata
def new_plugin_version(cls, metadata, plugin_id, filename, plugin_stage):
    """
    If a new version of an existing plugin is submitted via the API
    update the version zero record with its details and store a
    revision of the former version zero entry for database audit purposes.

    :param metadata: ConfigParser representation of metadata.txt
    :type metadata: configparser.ConfigParser
    :param plugin_id: plugin root folder. Makes up the PK
    :type plugin_id: str
    :param filename: filename of plugin.zip in datastore (currently s3)
    :type filename: str
    :param plugin_stage: plugins stage (dev or prd)
    :type plugin_stage: str
    :raises DataError: 400 when the plugin is unknown
    :returns: json describing plugin metadata
    :rtype: json
    """
    result = cls.get_plugin_item(plugin_id, plugin_stage)
    try:
        version_zero = next(result)
    except StopIteration:
        get_log().error("PluginNotFound")
        raise DataError(400, "Plugin Not Found")

    # Update version zero
    cls.update_version_zero(metadata, version_zero, filename)
    get_log().info("VersionZeroUpdated")

    # Insert v0 into revision
    cls.insert_revision(version_zero.attribute_values)
    get_log().info("RevisionInserted", pluginId=plugin_id, revision=version_zero.revisions)

    updated_metadata = json.loads(
        json.dumps(version_zero.attribute_values, cls=ModelEncoder))
    # Was duplicated as a second "RevisionInserted" event; renamed to
    # "MetadataStored" to match archive_plugin's logging.
    get_log().info("MetadataStored", metadata=updated_metadata, stage=plugin_stage)
    return updated_metadata