Example #1
def get_actions_api(hma_config_table: str) -> bottle.Bottle:
    # The documentation below expects prefix to be '/actions/'
    actions_api = bottle.Bottle()
    HMAConfig.initialize(hma_config_table)

    @actions_api.get("/", apply=[jsoninator])
    def fetch_all_actions() -> FetchAllActionsResponse:
        """
        Return all action configs.
        """
        action_configs = ActionPerformer.get_all()
        return FetchAllActionsResponse(
            actions_response=[config.__dict__ for config in action_configs]
        )

    @actions_api.put(
        "/<old_name>/<old_config_subtype>",
        apply=[jsoninator(CreateUpdateActionRequest)],
    )
    def update_action(
        request: CreateUpdateActionRequest, old_name: str, old_config_subtype: str
    ) -> UpdateActionResponse:
        """
        Update the name, url, and headers for the action with name=<old_name> and subtype=<old_config_subtype>.
        """
        if old_name != request.name or old_config_subtype != request.config_subtype:
            # The name field can't be updated because it is the primary key.
            # The config subtype can't be updated because it is a config class-level param.
            delete_action(old_name)
            create_action(request)
        else:
            config = ActionPerformer._get_subtypes_by_name()[
                request.config_subtype
            ].getx(request.name)
            for key, value in request.fields.items():
                setattr(config, key, value)
            hmaconfig.update_config(config)
        return UpdateActionResponse(response="The action config is updated.")

    @actions_api.post("/", apply=[jsoninator(CreateUpdateActionRequest)])
    def create_action(request: CreateUpdateActionRequest) -> CreateActionResponse:
        """
        Create an action.
        """
        config = ActionPerformer._get_subtypes_by_name()[request.config_subtype](
            **{"name": request.name, **request.fields}
        )
        hmaconfig.create_config(config)
        return CreateActionResponse(response="The action config is created.")

    @actions_api.delete("/<name>", apply=[jsoninator])
    def delete_action(name: str) -> DeleteActionResponse:
        """
        Delete the action with name=<name>.
        """
        hmaconfig.delete_config_by_type_and_name("ActionPerformer", name)
        return DeleteActionResponse(response="The action config is deleted.")

    return actions_api
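
The comment at the top of the factory notes that the route prefix (here '/actions/') is supplied by the root application. A minimal sketch of how such a sub-app might be mounted, assuming a hypothetical root app and an illustrative table name:

import bottle

root_app = bottle.Bottle()

# bottle.Bottle.mount() attaches a sub-application under a URL prefix, so the
# routes declared above become e.g. GET /actions/ and DELETE /actions/<name>.
root_app.mount("/actions/", get_actions_api("HMAConfigTable"))

if __name__ == "__main__":
    root_app.run(host="localhost", port=8080)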
Example #2
def get_submit_api(dynamodb_table: Table, image_bucket_key: str,
                   image_folder_key: str) -> bottle.Bottle:
    """
    A Closure that includes all dependencies that MUST be provided by the root
    API that this API plugs into. Declare dependencies here, but initialize in
    the root API alone.
    """

    # A prefix to all routes must be provided by the api_root app
    # The documentation below expects prefix to be '/submit/'
    submit_api = bottle.Bottle()

    @submit_api.post("/", apply=[jsoninator(SubmitContentRequestBody)])
    def submit(
        request: SubmitContentRequestBody,
    ) -> t.Union[SubmitContentResponse, SubmitContentError]:
        """
        Endpoint to allow for the general submission of content to the system
        """

        assert isinstance(request, SubmitContentRequestBody)
        logger.debug(f"Content Submit Request Received {request.content_id}")

        if request.submission_type == "UPLOAD":
            file_name = request.content_id
            file_contents = base64.b64decode(request.content_ref)
            # TODO a whole bunch more validation and error checking...
            s3_client.put_object(
                Body=file_contents,
                Bucket=image_bucket_key,
                Key=f"{image_folder_key}{file_name}",
            )

            return SubmitContentResponse(content_id=request.content_id,
                                         submit_successful=True)

        # Other possible submission types are not supported so just echo content_id for testing
        bottle.response.status = 422
        return SubmitContentError(
            content_id=request.content_id,
            message="submission_type not yet supported",
        )

    return submit_api
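
The handler above reads submission_type, content_id, and content_ref from the request body, with content_ref carrying the file bytes base64-encoded. A hedged client-side sketch (the base URL is illustrative, and the real SubmitContentRequestBody may require additional fields not shown in this excerpt):

import base64

import requests  # assumes the third-party 'requests' package is available

with open("photo.jpg", "rb") as f:
    payload = {
        "submission_type": "UPLOAD",
        "content_id": "photo-001",
        # content_ref carries the media, base64-encoded, per the handler above
        "content_ref": base64.b64encode(f.read()).decode("ascii"),
    }

resp = requests.post("http://localhost:8080/submit/", json=payload)
print(resp.status_code, resp.json())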
Example #3
def get_action_rules_api(hma_config_table: str) -> bottle.Bottle:
    # The endpoints below imply a prefix of '/action-rules'
    action_rules_api = bottle.Bottle()
    HMAConfig.initialize(hma_config_table)

    @action_rules_api.get("/", apply=[jsoninator])
    def get_action_rules() -> ActionRulesResponse:
        """
        Return all action rules.
        """
        error_message = ""
        action_rules = []

        try:
            action_rules = ActionRule.get_all()
            logger.info("action_rules: %s", action_rules)
        except Exception as e:
            error_message = "Unexpected error."
            handle_unexpected_error(e)

        return ActionRulesResponse(error_message, action_rules)

    @action_rules_api.post("/", apply=[jsoninator(ActionRulesRequest)])
    def create_action_rule(
        request: ActionRulesRequest,
    ) -> ActionRulesResponse:
        """
        Create an action rule.
        """
        logger.info("request: %s", request)
        error_message = ""

        try:
            hmaconfig.create_config(request.action_rule)
        except ClientError as e:
            # TODO this test for "already exists" should be moved to a common place
            if e.response["Error"]["Code"] == "ConditionalCheckFailedException":
                error_message = f"An action rule with the name '{request.action_rule.name}' already exists."
                logger.warning(
                    "Duplicate action rule creation attempted: %s",
                    e.response["Error"]["Message"],
                )
            else:
                error_message = "Unexpected error."
                logger.error(
                    "Unexpected client error: %s", e.response["Error"]["Message"]
                )
                logger.exception(e)
            response.status = 500
        except Exception as e:
            error_message = "Unexpected error."
            handle_unexpected_error(e)

        return ActionRulesResponse(error_message)

    @action_rules_api.put("/<old_name>", apply=[jsoninator(ActionRulesRequest)])
    def update_action_rule(
        request: ActionRulesRequest,
        old_name: str,
    ) -> ActionRulesResponse:
        """
        Update the action rule with name=<old_name>.
        """
        logger.info("old_name: %s", old_name)
        logger.info("request: %s", request)
        error_message = ""

        if ActionRule.exists(request.action_rule.name):
            try:
                hmaconfig.update_config(request.action_rule)
            except Exception as e:
                error_message = "Unexpected error."
                handle_unexpected_error(e)
        elif ActionRule.exists(old_name):
            try:
                hmaconfig.create_config(request.action_rule)
                hmaconfig.delete_config_by_type_and_name("ActionRule", old_name)
            except Exception as e:
                error_message = "Unexpected error."
                handle_unexpected_error(e)
        else:
            error_message = f"An action rule named '{request.action_rule.name}' or '{old_name}' does not exist."
            logger.warning(
                "An attempt was made to update an action rule named either '%s' or '%s' but neither exist.",
                request.action_rule.name,
                old_name,
            )
            response.status = 500

        return ActionRulesResponse(error_message)

    @action_rules_api.delete("/<name>", apply=[jsoninator])
    def delete_action_rule(name: str) -> ActionRulesResponse:
        """
        Delete the action rule with name=<name>.
        """
        logger.info("name: %s", name)
        error_message = ""

        if ActionRule.exists(name):
            try:
                hmaconfig.delete_config_by_type_and_name("ActionRule", name)
            except Exception as e:
                error_message = "Unexpected error."
                handle_unexpected_error(e)
        else:
            error_message = f"An action rule named '{name}' does not exist."
            logger.warning(
                "An attempt was made to delete an action rule named '%s' that does not exist.",
                name,
            )
            response.status = 500

        return ActionRulesResponse(error_message)

    return action_rules_api
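
handle_unexpected_error is used throughout this API but is not part of the excerpt; a minimal sketch consistent with how it is called (log the exception and flag a server error) could look like the following, where logger is assumed to be the module-level logger already used above:

from bottle import response

def handle_unexpected_error(e: Exception) -> None:
    # Hypothetical helper; the real implementation is not shown in this excerpt.
    logger.error("Unexpected error")
    logger.exception(e)
    response.status = 500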
Example #4
    bar: int

    @classmethod
    def from_dict(cls, d):
        return cls(d["foo"], d["bar"])


mock_app = Bottle()


@mock_app.route("/response-is-json/", apply=[jsoninator])
def response_is_json() -> ResponseClass:
    return ResponseClass("X", 10)


@mock_app.post("/response-and-request-is-json/", apply=[jsoninator(RequestBody)])
def response_and_request_is_json(request: RequestBody) -> ResponseClass:
    assert isinstance(request, RequestBody)
    assert request.foo == "X"
    assert request.bar == 10
    return ResponseClass("C", 20)


if __name__ == "__main__":
    mock_app.run(port=9090)


class MiddlewareUnitTest(unittest.TestCase):
    def test_json_response_body(self):
        app = TApp(mock_app)
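
The snippet above begins mid-definition, so the request/response classes are only partially visible. Based on the fields exercised by the handlers (foo as a string, bar as an int, and a from_dict constructor), they were presumably simple dataclasses along these lines (a reconstruction, not the original source):

from dataclasses import dataclass


@dataclass
class RequestBody:
    foo: str
    bar: int

    @classmethod
    def from_dict(cls, d):
        return cls(d["foo"], d["bar"])


@dataclass
class ResponseClass:
    # Fields inferred from ResponseClass("X", 10); whatever serialization hook
    # jsoninator expects on responses is not shown in this excerpt.
    foo: str
    bar: int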
Example #5
def get_submit_api(
    dynamodb_table: Table,
    image_bucket: str,
    image_prefix: str,
    submissions_queue_url: str,
    hash_queue_url: str,
) -> bottle.Bottle:
    """
    A Closure that includes all dependencies that MUST be provided by the root
    API that this API plugs into. Declare dependencies here, but initialize in
    the root API alone.
    """

    # A prefix to all routes must be provided by the api_root app
    # The documentation below expects prefix to be '/submit/'
    submit_api = bottle.Bottle()
    s3_bucket_image_source = S3BucketContentSource(image_bucket, image_prefix)

    def _content_exist_error(content_id: str):
        return bottle.abort(
            400,
            f"Content with id '{content_id}' already exists if you want to resubmit `force_resubmit=True` must be included in payload.",
        )

    def _record_content_submission_from_request(
        request: SubmitRequestBodyBase, ) -> bool:
        """
        Given a request object submission record the content object to the table passed to
        the API using 'record_content_submission'
        Note: this method does not store the content media itself.
        """

        content_ref, content_ref_type = request.get_content_ref_details()

        return record_content_submission(
            dynamodb_table,
            content_id=request.content_id,
            content_type=request.content_type,
            content_ref=content_ref,
            content_ref_type=content_ref_type,
            additional_fields=set(request.additional_fields)
            if request.additional_fields else set(),
            force_resubmit=request.force_resubmit,
        )

    @submit_api.post("/url/",
                     apply=[jsoninator(SubmitContentViaURLRequestBody)])
    def submit_url(
        request: SubmitContentViaURLRequestBody,
    ) -> t.Union[SubmitResponse, SubmitError]:
        """
        Submission via a URL to the content. This does not store a copy of the content in S3.
        """
        if not _record_content_submission_from_request(request):
            return _content_exist_error(request.content_id)

        send_submission_to_url_queue(
            dynamodb_table,
            submissions_queue_url,
            request.content_id,
            request.content_type,
            request.content_url,
        )

        return SubmitResponse(content_id=request.content_id,
                              submit_successful=True)

    @submit_api.post("/bytes/",
                     apply=[jsoninator(SubmitContentBytesRequestBody)])
    def submit_bytes(
        request: SubmitContentBytesRequestBody,
    ) -> t.Union[SubmitResponse, SubmitError]:
        """
        Submission of media to HMA via a direct transfer of bytes to the system's S3 bucket.
        """
        content_id = request.content_id
        file_contents = base64.b64decode(request.content_bytes)

        # We want to record the submission before triggering any processing of
        # the content itself, therefore we write to DynamoDB before S3.
        if not _record_content_submission_from_request(request):
            return _content_exist_error(request.content_id)

        s3_bucket_image_source.put_image_bytes(content_id, file_contents)

        return SubmitResponse(content_id=request.content_id,
                              submit_successful=True)

    @submit_api.post(
        "/put-url/",
        apply=[jsoninator(SubmitContentViaPutURLUploadRequestBody)])
    def submit_put_url(
        request: SubmitContentViaPutURLUploadRequestBody,
    ) -> t.Union[SubmitViaUploadUrlResponse, SubmitError]:
        """
        Submission of content to HMA in two steps:
        1st: create a content record and a presigned put URL based on the request body.
        2nd: upload to the system's S3 bucket using the put URL returned by this method.
        """
        presigned_url = create_presigned_put_url(
            bucket_name=image_bucket,
            key=s3_bucket_image_source.get_s3_key(request.content_id),
            file_type=request.file_type,
        )

        if presigned_url:
            if not _record_content_submission_from_request(request):
                return _content_exist_error(request.content_id)

            return SubmitViaUploadUrlResponse(
                content_id=request.content_id,
                file_type=str(request.file_type),
                presigned_url=presigned_url,
            )

        bottle.response.status = 400
        return SubmitError(
            content_id=request.content_id,
            message="Failed to generate upload url",
        )

    @submit_api.post("/hash/",
                     apply=[jsoninator(SubmitContentHashRequestBody)])
    def submit_hash(
        request: SubmitContentHashRequestBody,
    ) -> t.Union[SubmitResponse, SubmitError]:
        """
        Submission of a hash from a piece of content.
        Functions the same as the other submission endpoints but skips
        the hasher and media storage.
        """

        # Record the content object (even though, as with URL submissions, we don't store the media itself)
        if not _record_content_submission_from_request(request):
            return _content_exist_error(request.content_id)

        # Record hash
        #   ToDo expand submit hash API to include `signal_specific_attributes`
        hash_record = PipelineHashRecord(
            content_id=request.content_id,
            signal_type=t.cast(t.Type[SignalType], request.signal_type),
            content_hash=request.signal_value,
            updated_at=datetime.datetime.now(),
        )
        hash_record.write_to_table(dynamodb_table)

        # Send hash directly to matcher
        # todo this could maybe try and reuse the methods in UnifiedHasher in #749
        _get_sqs_client().send_message(
            QueueUrl=hash_queue_url,
            MessageBody=json.dumps(hash_record.to_sqs_message()),
        )

        return SubmitResponse(content_id=request.content_id,
                              submit_successful=True)

    return submit_api
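
The /put-url/ handler above implies a two-step client flow: first register the content and obtain a presigned PUT URL, then upload the bytes directly to S3. A hedged sketch of that flow (field names are taken from the handler; the base URL and file are illustrative, and the real request body may require more fields):

import requests  # assumes the third-party 'requests' package is available

API = "http://localhost:8080/submit"

# Step 1: create the content record and ask for a presigned upload URL.
init = requests.post(
    f"{API}/put-url/",
    json={"content_id": "photo-001", "file_type": "image/jpeg"},
).json()

# Step 2: PUT the raw bytes straight to the presigned S3 URL returned above.
with open("photo.jpg", "rb") as f:
    upload = requests.put(
        init["presigned_url"],
        data=f,
        headers={"Content-Type": "image/jpeg"},
    )
upload.raise_for_status()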
Example #6
    @classmethod
    def from_dict(cls, d):
        return cls(d["foo"], d["bar"])


mock_app = Bottle()


@mock_app.route("/response-is-json/", apply=[jsoninator])
def response_is_json() -> ResponseClass:
    return ResponseClass("X", 10)


@mock_app.post("/response-and-request-is-json/",
               apply=[jsoninator(RequestBody)])
def response_and_request_is_json(request: RequestBody) -> ResponseClass:
    assert isinstance(request, RequestBody)
    assert request.foo == "X"
    assert request.bar == 10
    return ResponseClass("C", 20)


if __name__ == "__main__":
    mock_app.run(port=9090)


class MiddlewareUnitTest(unittest.TestCase):
    def test_json_response_body(self):
        app = TApp(mock_app)
Example #7
def get_submit_api(
    dynamodb_table: Table,
    image_bucket: str,
    image_prefix: str,
    images_topic_arn: str,
) -> bottle.Bottle:
    """
    A Closure that includes all dependencies that MUST be provided by the root
    API that this API plugs into. Declare dependencies here, but initialize in
    the root API alone.
    """

    # A prefix to all routes must be provided by the api_root app
    # The documentation below expects prefix to be '/submit/'
    submit_api = bottle.Bottle()
    s3_bucket_image_source = S3BucketImageSource(image_bucket, image_prefix)

    # Set of helpers that could be split into their own submit endpoints depending on long-term design choices

    def direct_upload(
        request: SubmitContentRequestBody,
    ) -> t.Union[SubmitContentResponse, SubmitContentError]:
        """
        Direct transfer of bits to system's s3 bucket
        """
        content_id = request.content_id
        file_contents = base64.b64decode(
            request.content_bytes_url_or_file_type)

        # We want to record the submission before triggering any processing of
        # the content itself, therefore we write to DynamoDB before S3.
        record_content_submission(dynamodb_table, request)

        # TODO a whole bunch more validation and error checking...
        s3_bucket_image_source.put_image_bytes(content_id, file_contents)

        return SubmitContentResponse(content_id=request.content_id,
                                     submit_successful=True)

    def post_url_upload(
        request: SubmitContentRequestBody,
    ) -> t.Union[InitUploadResponse, SubmitContentError]:
        """
        Submission of content to the system's s3 bucket by providing a post url to client
        """
        # TODO error checking on if key already exist etc.
        presigned_url = create_presigned_put_url(
            bucket_name=image_bucket,
            key=s3_bucket_image_source.get_s3_key(request.content_id),
            file_type=request.content_bytes_url_or_file_type,
        )

        if presigned_url:
            record_content_submission(dynamodb_table, request)
            return InitUploadResponse(
                content_id=request.content_id,
                file_type=str(request.content_bytes_url_or_file_type),
                presigned_url=presigned_url,
            )

        bottle.response.status = 400
        return SubmitContentError(
            content_id=request.content_id,
            message="not yet supported",
        )

    def from_url(
        request: SubmitContentRequestBody,
    ) -> t.Union[SubmitContentResponse, SubmitContentError]:
        """
        Submission via a url to content. Current behavior copies content into the system's s3 bucket.
        """
        content_id = request.content_id
        url = request.content_bytes_url_or_file_type

        # Again, we want to record the submission before triggering any processing
        # of the content itself, therefore we write to DynamoDB before S3.
        record_content_submission(dynamodb_table, request)

        url_submission_message = URLImageSubmissionMessage(
            content_id, t.cast(str, url))
        _get_sns_client().publish(
            TopicArn=images_topic_arn,
            Message=json.dumps(url_submission_message.to_sqs_message()),
        )

        return SubmitContentResponse(content_id=request.content_id,
                                     submit_successful=True)

    @submit_api.post("/", apply=[jsoninator(SubmitContentRequestBody)])
    def submit(
        request: SubmitContentRequestBody,
    ) -> t.Union[SubmitContentResponse, InitUploadResponse,
                 SubmitContentError]:
        """
        Endpoint to allow for the general submission of content to the system
        """

        assert isinstance(request, SubmitContentRequestBody)
        logger.debug(f"Content Submit Request Received {request.content_id}")

        if request.submission_type == SubmissionType.DIRECT_UPLOAD.name:
            return direct_upload(request)
        elif request.submission_type == SubmissionType.POST_URL_UPLOAD.name:
            return post_url_upload(request)
        elif request.submission_type == SubmissionType.FROM_URL.name:
            return from_url(request)
        else:
            # Other possible submission types are not supported so just echo content_id for testing
            bottle.response.status = 422
            return SubmitContentError(
                content_id=request.content_id,
                message="submission_type not yet supported",
            )

    @submit_api.post("/init-upload/",
                     apply=[jsoninator(InitUploadRequestBody)])
    def init_upload(
        request: InitUploadRequestBody,
    ) -> t.Union[InitUploadResponse, SubmitContentError]:
        """
        Endpoint to provide requester with presigned url to upload a photo
        """

        # TODO error checking on if key already exist etc.
        presigned_url = create_presigned_put_url(
            bucket_name=image_bucket,
            key=s3_bucket_image_source.get_s3_key(request.content_id),
            file_type=request.file_type,
        )
        if presigned_url:
            return InitUploadResponse(
                content_id=request.content_id,
                file_type=request.file_type,
                presigned_url=presigned_url,
            )

        bottle.response.status = 400
        return SubmitContentError(
            content_id=request.content_id,
            message="not yet supported",
        )

    return submit_api
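
_get_sns_client() here, like _get_sqs_client() in the earlier submit API, is referenced but not defined in the excerpt. A common pattern for such helpers, and only a guess at the originals, is a lazily created, process-wide cached boto3 client:

import functools

import boto3


@functools.lru_cache(maxsize=None)
def _get_sns_client():
    # Hypothetical helper: create the SNS client once and reuse it across requests.
    return boto3.client("sns")


@functools.lru_cache(maxsize=None)
def _get_sqs_client():
    return boto3.client("sqs")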
Example #8
def get_datasets_api(
    hma_config_table: str,
    datastore_table: Table,
    threat_exchange_data_bucket_name: str,
    threat_exchange_data_folder: str,
    threat_exchange_pdq_file_extension: str,
) -> bottle.Bottle:
    """
    ToDo / FixMe: this file is probably more about privacy groups than datasets...
    """
    # The documentation below expects prefix to be '/datasets/'
    datasets_api = bottle.Bottle()
    HMAConfig.initialize(hma_config_table)

    @datasets_api.get("/", apply=[jsoninator])
    def get_all_dataset_summaries() -> DatasetSummariesResponse:
        """
        Returns summaries for all datasets. A summary includes all facts that are
        not configurable, e.g. its name, the number of hashes it has, and the
        number of matches it has caused.
        """
        return DatasetSummariesResponse(
            threat_exchange_datasets=_get_threat_exchange_datasets(
                datastore_table,
                threat_exchange_data_bucket_name,
                threat_exchange_data_folder,
                threat_exchange_pdq_file_extension,
            ))

    @datasets_api.post("/update", apply=[jsoninator(UpdateDatasetRequest)])
    def update_dataset(request: UpdateDatasetRequest) -> Dataset:
        """
        Update dataset values: fetcher_active, write_back, and matcher_active.
        """
        config = ThreatExchangeConfig.getx(str(request.privacy_group_id))
        config.fetcher_active = request.fetcher_active
        config.write_back = request.write_back
        config.matcher_active = request.matcher_active
        updated_config = hmaconfig.update_config(config).__dict__
        updated_config["privacy_group_id"] = updated_config["name"]
        return Dataset.from_dict(updated_config)

    @datasets_api.post("/create", apply=[jsoninator(CreateDatasetRequest)])
    def create_dataset(request: CreateDatasetRequest) -> CreateDatasetResponse:
        """
        Create a local dataset (defaults defined in CreateDatasetRequest)
        """
        assert isinstance(request, CreateDatasetRequest)

        create_privacy_group_if_not_exists(
            privacy_group_id=str(request.privacy_group_id),
            privacy_group_name=request.privacy_group_name,
            description=request.description,
            in_use=True,
            fetcher_active=request.fetcher_active,
            matcher_active=request.matcher_active,
            write_back=request.write_back,
        )

        return CreateDatasetResponse(
            response=f"Created dataset {request.privacy_group_id}")

    @datasets_api.post("/sync", apply=[jsoninator])
    def sync_datasets() -> SyncDatasetResponse:
        """
        Fetch new collaborations from ThreatExchange and sync with the configs stored in DynamoDB.
        """
        sync_privacy_groups()
        return SyncDatasetResponse(response="Privacy groups are up to date")

    @datasets_api.post("/delete/<key>", apply=[jsoninator])
    def delete_dataset(key=None) -> DeleteDatasetResponse:
        """
        Delete the dataset with key=<key>
        """
        config = ThreatExchangeConfig.getx(str(key))
        hmaconfig.delete_config(config)
        return DeleteDatasetResponse(response="The privacy group is deleted")

    return datasets_api
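
The /datasets/update handler above toggles fetcher_active, write_back, and matcher_active for a privacy group and returns the updated Dataset. A hedged client sketch (the base URL and privacy group id are illustrative; the body mirrors the fields read from UpdateDatasetRequest):

import requests  # assumes the third-party 'requests' package is available

resp = requests.post(
    "http://localhost:8080/datasets/update",
    json={
        "privacy_group_id": "123456789",
        "fetcher_active": True,
        "write_back": False,
        "matcher_active": True,
    },
)
print(resp.json())  # the updated dataset, serialized by the jsoninator middleware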
Example #9
            for metadata_obj in filter(
                    lambda m: m.get_source() == BANKS_SOURCE_SHORT_CODE,
                    match.metadata):
                metadata_obj = t.cast(BankedSignalIndexMetadata, metadata_obj)
                match_objects.append(
                    MatchesForHash(
                        match_distance=int(match.distance),
                        matched_signal=banks_table.get_bank_member(
                            metadata_obj.bank_member_id),
                    ))

        return match_objects

    @matches_api.get(
        "/for-hash/",
        apply=[jsoninator(MatchesForHashRequest, from_query=True)])
    def for_hash(request: MatchesForHashRequest) -> MatchesForHashResponse:
        """
        For a given hash/signal, check the index(es) for matches and return the details.

        This does not change system state; the metadata returned will not be written to
        any tables, unlike when matches are found for submissions.
        """

        return MatchesForHashResponse(matches=_matches_for_hash(
            request.signal_type, request.signal_value))

    @matches_api.post("/for-media/",
                      apply=[jsoninator(MatchesForMediaRequest)])
    def for_media(
        request: MatchesForMediaRequest,
Example #10
def get_datasets_api(
    hma_config_table: str,
    datastore_table: Table,
    threat_exchange_data_bucket_name: str,
    threat_exchange_data_folder: str,
) -> bottle.Bottle:
    """
    ToDo / FixMe: this file is probably more about privacy groups than datasets...
    """
    # The documentation below expects prefix to be '/datasets/'
    datasets_api = SubApp()
    HMAConfig.initialize(hma_config_table)

    @datasets_api.get("/", apply=[jsoninator])
    def get_all_dataset_summaries() -> DatasetSummariesResponse:
        """
        Returns summaries for all datasets. A summary includes all facts that are
        not configurable, e.g. its name, the number of hashes it has, and the
        number of matches it has caused.
        """
        return DatasetSummariesResponse(
            threat_exchange_datasets=_get_threat_exchange_datasets(
                datastore_table,
                threat_exchange_data_bucket_name,
                threat_exchange_data_folder,
            ))

    @datasets_api.post("/update", apply=[jsoninator(UpdateDatasetRequest)])
    def update_dataset(request: UpdateDatasetRequest) -> Dataset:
        """
        Update dataset values: fetcher_active, write_back, and matcher_active.
        """
        config = ThreatExchangeConfig.getx(str(request.privacy_group_id))
        config.fetcher_active = request.fetcher_active
        config.write_back = request.write_back
        config.matcher_active = request.matcher_active
        updated_config = hmaconfig.update_config(config).__dict__
        updated_config["privacy_group_id"] = updated_config["name"]

        additional_config = AdditionalMatchSettingsConfig.get(
            str(request.privacy_group_id))
        if request.pdq_match_threshold:
            if additional_config:
                additional_config.pdq_match_threshold = int(
                    request.pdq_match_threshold)
                hmaconfig.update_config(additional_config)
            else:
                additional_config = AdditionalMatchSettingsConfig(
                    str(request.privacy_group_id),
                    int(request.pdq_match_threshold))
                hmaconfig.create_config(additional_config)
        elif additional_config:  # pdq_match_threshold was set and now should be removed
            hmaconfig.delete_config(additional_config)

        return Dataset.from_dict(updated_config)

    @datasets_api.post("/create", apply=[jsoninator(CreateDatasetRequest)])
    def create_dataset(request: CreateDatasetRequest) -> CreateDatasetResponse:
        """
        Create a local dataset (defaults defined in CreateDatasetRequest)
        """
        assert isinstance(request, CreateDatasetRequest)

        create_privacy_group_if_not_exists(
            privacy_group_id=str(request.privacy_group_id),
            privacy_group_name=request.privacy_group_name,
            description=request.description,
            in_use=True,
            fetcher_active=request.fetcher_active,
            matcher_active=request.matcher_active,
            write_back=request.write_back,
        )

        return CreateDatasetResponse(
            response=f"Created dataset {request.privacy_group_id}")

    @datasets_api.post("/sync", apply=[jsoninator])
    def sync_datasets() -> SyncDatasetResponse:
        """
        Fetch new collaborations from ThreatExchange and sync with the configs stored in DynamoDB.
        """
        sync_privacy_groups()
        return SyncDatasetResponse(response="Privacy groups are up to date")

    @datasets_api.post("/delete/<key>", apply=[jsoninator])
    def delete_dataset(key=None) -> DeleteDatasetResponse:
        """
        Delete the dataset with key=<key>
        """
        config = ThreatExchangeConfig.getx(str(key))
        hmaconfig.delete_config(config)
        return DeleteDatasetResponse(response="The privacy group is deleted")

    @datasets_api.get("/match-settings", apply=[jsoninator])
    def get_all_match_settings() -> MatchSettingsResponse:
        """
        Return all match settings configs
        """
        return MatchSettingsResponse(match_settings=[
            MatchSettingsResponseBody(c)
            for c in AdditionalMatchSettingsConfig.get_all()
        ])

    @datasets_api.get("/match-settings/<key>", apply=[jsoninator])
    def get_match_settings(key=None, ) -> MatchSettingsResponseBody:
        """
        Return a match settings config for a given privacy_group_id
        """
        if config := AdditionalMatchSettingsConfig.get(str(key)):
            return MatchSettingsResponseBody(config)
        return bottle.abort(400, f"No match_settings for pg_id {key} found")
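
The match-settings routes above expose the optional per-privacy-group PDQ threshold. A hedged sketch for inspecting one (the base URL and privacy group id are illustrative; the '/datasets/' prefix comes from the comment at the top of the factory):

import requests  # assumes the third-party 'requests' package is available

resp = requests.get("http://localhost:8080/datasets/match-settings/123456789")
if resp.ok:
    print(resp.json())  # expected to include the group's pdq_match_threshold
else:
    print("no match settings configured:", resp.status_code)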
Example #11
        return MatchSettingsResponse(match_settings=[
            MatchSettingsResponseBody(c)
            for c in AdditionalMatchSettingsConfig.get_all()
        ])

    @datasets_api.get("/match-settings/<key>", apply=[jsoninator])
    def get_match_settings(key=None, ) -> MatchSettingsResponseBody:
        """
        Return a match settings config for a given privacy_group_id
        """
        if config := AdditionalMatchSettingsConfig.get(str(key)):
            return MatchSettingsResponseBody(config)
        return bottle.abort(400, f"No match_settings for pg_id {key} found")

    @datasets_api.post("/match-settings",
                       apply=[jsoninator(MatchSettingsUpdateRequest)])
    def create_or_update_match_settings(
        request: MatchSettingsUpdateRequest, ) -> MatchSettingsUpdateResponse:
        """
        Create or update a match settings config for a given privacy_group_id
        """
        if config := AdditionalMatchSettingsConfig.get(
                request.privacy_group_id):
            config.pdq_match_threshold = request.pdq_match_threshold
            hmaconfig.update_config(config)

            return MatchSettingsUpdateResponse(
                f"match_settings updated for pg_id {request.privacy_group_id} with pdq_match_threshold={request.pdq_match_threshold}"
            )

        config = AdditionalMatchSettingsConfig(request.privacy_group_id,