Example #1
    def init_tests(self):
        self.maps = load_json_file('data/attributeMaps.json', encoding='utf8')
        self.datasets = []
        datasets = load_json_file('data/testDatasets.json')
        for dt in datasets:
            d = Dataset()
            d.unmarshall(dt)
            self.datasets.append(d)
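
A helper named load_json_file appears in every example here. A minimal sketch of what such a helper could look like, assuming it simply parses a JSON file with an optional encoding (the project's real utility may do more):

import json

def load_json_file(path, encoding='utf8'):
    # Open the file with the given encoding and return the parsed JSON
    # structure (a dict or a list, depending on the file).
    with open(path, encoding=encoding) as f:
        return json.load(f)
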
Example #2
    def test_4(self):
        maps = load_json_file('data/attributeMaps.json', encoding='utf8')
        reqj = load_json_file('data/testLinkRequest4.json', encoding='utf8')

        lreq = LinkRequest()
        lreq.unmarshall(reqj)

        r = Reconciliation()
        r.set_mappings(maps)
        with self.assertRaises(NoMatchingRules):
            r.similarity(lreq.datasetA, lreq.datasetB)
Example #3
    def test_3(self):
        maps = load_json_file('data/attributeMaps.json', encoding='utf8')
        reqj = load_json_file('data/testLinkRequest3.json', encoding='utf8')

        lreq = LinkRequest()
        lreq.unmarshall(reqj)

        r = Reconciliation()
        r.set_mappings(maps)
        sim = r.similarity(lreq.datasetA, lreq.datasetB)
        print(reqj)
        print(sim)
        self.assertEqual(sim, 1.0)
Example #4
    def init_tests(self):
        # Test data sets
        self.datasets = []
        datasets = load_json_file('data/testDatasets.json')
        for dt in datasets:
            d = Dataset()
            d.unmarshall(dt)
            self.datasets.append(d)

        self.mappings = []
        mappings = load_json_file('data/attributeMaps.json')
        for mt in mappings:
            m = AttributeMap()
            m.unmarshall(mt)
            self.mappings.append(m)
Example #5
    def test_unmarshall_marshall_null_field(self):
        lreq = load_json_file('data/testLinkRequestWithNulls.json')
        d = LinkRequest()
        d.unmarshall(lreq)
        print(d)
        print(d.json_marshall())
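
Dataset, AttributeMap and LinkRequest are exercised through the same unmarshall / marshall / json_marshall interface. A purely illustrative sketch of that pattern, assuming the classes copy dict keys onto attributes (the real model classes may validate fields and handle nested objects differently):

import json

class SimpleModel:
    def unmarshall(self, data):
        # Copy the incoming dict's keys onto the instance as attributes.
        for key, value in data.items():
            setattr(self, key, value)
        return self

    def marshall(self):
        # Return a plain-dict representation of the instance.
        return dict(self.__dict__)

    def json_marshall(self):
        # Serialise the marshalled dict to a JSON string.
        return json.dumps(self.marshall())
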
Example #6
    def test_LinkRequest(self):
        lreq = load_json_file('data/testLinkRequest.json')

        r = LinkRequest()
        r.unmarshall(lreq)

        self.assertEqual(r.datasetA.properties['test'],
                         lreq['datasetA']['properties']['test'])
Example #7
    def test_Dataset(self):
        datasets = load_json_file('data/testDatasets.json')

        d = Dataset()
        d.unmarshall(datasets[0])

        self.assertEqual(d.properties['test'],
                         datasets[0]['properties']['test'])
Example #8
    def test_AttributeMap(self):
        mappings = load_json_file('data/attributeMaps.json')

        m = AttributeMap()
        m.unmarshall(mappings[0])

        self.assertEqual(m.pairings[0].attributes,
                         mappings[0]['pairings'][0]['attributes'])
Example #9
    def test_datastore_api(self):
        cm = CMHandler('data/')
        sm = cm.get_microservice_by_api('SM')

        smh = SMHandler(sm,
                        key='data/httpsig_key_esmo.pem',
                        retries=5,
                        validate=False)

        lreq = load_json_file('data/testLinkRequest.json')
        storeEnt = StoreEntry()
        storeEnt.id = 'testID'
        storeEnt.type = 'linkRequest'
        storeEnt.data = lreq  # json.dumps(lreq)

        # Start session
        sessionID = smh.startSession()
        self.assertIsNotNone(sessionID)

        smh.resetDatastore()

        smh.addDatastoreEntry(storeEnt.id, storeEnt.type, storeEnt.data)

        read_req = smh.getDatastoreEntry(storeEnt.id)
        self.assertEqual(storeEnt.id, read_req.id)
        self.assertEqual(storeEnt.type, read_req.type)
        self.assertEqual(storeEnt.data['datasetA']['id'],
                         read_req.data.datasetA.id)
        print("storeEnt.data:" + json.dumps(storeEnt.data))
        print("read_req.data:" + json.dumps(read_req.data.marshall()))

        read_reqs = smh.searchDatastoreEntries(storeEnt.type)
        read_req = read_reqs[0]
        self.assertEqual(storeEnt.id, read_req.id)
        self.assertEqual(storeEnt.type, read_req.type)
        self.assertEqual(storeEnt.data['datasetA']['id'],
                         read_req.data.datasetA.id)

        smh.deleteDatastoreEntry(storeEnt.id)

        with self.assertRaises(SessionManagerError):
            smh.getDatastoreEntry(storeEnt.id)
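
The datastore test builds a StoreEntry by assigning its id, type and data attributes directly. An illustrative sketch of such a container, assuming it is a plain value object (the real class may add validation or marshalling):

class StoreEntry:
    def __init__(self):
        self.id = None    # datastore entry identifier, e.g. 'testID'
        self.type = None  # entry type, e.g. 'linkRequest'
        self.data = None  # payload, here the parsed linkRequest dict
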
Example #10
def test_client_submit_linking_request():
    smh = sm_handler()

    smh.startSession()
    print("Started session: " + smh.sessId + "<br/>\n")

    smh.writeSessionVar(cburl, 'ClientCallbackAddr')

    token = smh.generateToken("SAMLms_0001", "SAMLms_0001")
    print("Generated msToken: " + token + "<br/>\n")

    streq = load_json_file(test_link_req)
    treq = LinkRequest()
    treq.unmarshall(streq)
    smh.writeSessionVar(treq.marshall(), 'linkRequest')
    print("Wrote linkRequest in Session: " + treq.json_marshall() + "<br/>\n")

    template = {
        'url': '/link/request/submit',
        'token': token,
    }

    return render_template('msToken.html', template=template)
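
The client test above calls an sm_handler() factory that is not shown. A sketch of what it could look like, reusing the SMHandler construction from the datastore test (the real helper may read the key path and retry settings from configuration):

def sm_handler():
    # Resolve the Session Manager microservice from the configuration
    # manager and wrap it in an SMHandler client.
    cm = CMHandler('data/')
    sm = cm.get_microservice_by_api('SM')
    return SMHandler(sm,
                     key='data/httpsig_key_esmo.pem',
                     retries=5,
                     validate=False)
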
Example #11
def submit_linking_request():
    clean_expired()

    # Check msToken against SM
    if 'msToken' in request.form:
        msToken = request.form['msToken']
    elif 'msToken' in request.args:
        msToken = request.args['msToken']
    else:
        return "missing msToken POST/GET parameter or bad content-type", 404

    smh = get_session_manager()
    try:
        smh.validateToken(msToken)
    except SessionManagerError as err:
        return "Error validating msToken: " + str(err), 403

    try:
        dest_url = smh.getSessionVar('ClientCallbackAddr')
    except SessionManagerError as err:
        return "Error retrieving ClientCallbackAddr from SM: " + str(err), 403

    # Load mappings to apply
    mappings_dicts = load_json_file(data_dir + 'attributeMaps.json')
    reconciliation = Reconciliation()
    reconciliation.set_mappings(mappings_dicts)

    # Parse input request
    try:
        link_req = smh.getSessionVar('linkRequest')
    except SessionManagerError as err:
        # return "Error retrieving linkRequest from SM: " + str(err), 403
        return redirect_return(
            smh, dest_url, 'ERROR', msID, apigwID,
            "Error retrieving linkRequest from SM: " + str(err))
    req = LinkRequest()
    req.json_unmarshall(link_req)

    now = datetime.now()

    request_id = str(uuid.uuid4())

    # Create a database row object
    db_req = database.Request(request_id=request_id,
                              request_date=now,
                              status=StatusCodes.PENDING,
                              dataset_a=req.datasetA.json_marshall(),
                              dataset_b=req.datasetB.json_marshall())

    # Store it on DB
    session = database.DbSession()
    session.add(db_req)
    session.commit()

    # Calculate similarity
    try:
        similarity = reconciliation.similarity(req.datasetA, req.datasetB)
    except (NoMatchingRules, MissingOrBadParams) as err:
        return redirect_return(
            smh, dest_url, 'ERROR', msID, apigwID,
            "Error performing the similarity check: " + str(err))
    db_req.similarity = similarity

    # Update request with the LLoA and the acceptance status
    if similarity >= acceptance_threshold:
        db_req.status = StatusCodes.ACCEPTED
    else:
        db_req.status = StatusCodes.REJECTED

    session.add(db_req)
    session.commit()
    session.close()

    # Fill in the requestID and store the request object in the dataStore
    req.id = request_id
    request_json = req.json_marshall()

    try:
        # Build the unique persistent storeEntry identifier
        # TODO: parametrise module name (search more incidences)
        entry_id = build_store_id_from_req("seal-autorecon", issuer,
                                           req.datasetA, req.datasetB)
        smh.addDatastoreEntry(entry_id, "linkRequest",
                              req.marshall())  # TODO: parametrise type
    except SessionManagerError as err:
        return redirect_return(
            smh, dest_url, 'ERROR', msID, apigwID,
            "Error writing updated linkRequest to dataStore: " + str(err))
    except StoreIDBuildError as err:
        return redirect_return(smh, dest_url, 'ERROR', msID, apigwID,
                               "Error building store ID: " + str(err))

    # We also overwrite it in SM session
    try:
        smh.writeSessionVar(req.marshall(),
                            'linkRequest')  # TODO: expected variable?
    except SessionManagerError as err:
        # return "Error writing updated linkRequest to SM: " + str(err), 403
        return redirect_return(
            smh, dest_url, 'ERROR', msID, apigwID,
            "Error writing updated linkRequest to SM: " + str(err))

    # return Response(request_json,
    #                status=200,
    #                mimetype='application/json')
    return redirect_return(smh, dest_url, 'OK', msID, apigwID)
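
submit_linking_request derives a persistent datastore identifier with build_store_id_from_req. A purely illustrative idea of how such an identifier could be built from the module name, issuer and dataset ids; the real implementation (and the conditions under which it raises StoreIDBuildError) may differ entirely:

import hashlib

def build_store_id_from_req(module_name, issuer, dataset_a, dataset_b):
    # Hypothetical scheme: hash the identifying fields so the same pair of
    # datasets always maps to the same datastore entry id.
    if not (dataset_a.id and dataset_b.id):
        raise StoreIDBuildError("both datasets must carry an id")
    raw = "|".join([module_name, str(issuer), str(dataset_a.id), str(dataset_b.id)])
    return module_name + ":" + hashlib.sha256(raw.encode('utf8')).hexdigest()
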
Example #12
    def _get(self, file_path, remote_url=None):
        ret_dict = None
        logging.debug(
            f"Try to get CM metadata set from local file cache in {file_path}")
        # First, try to read locally
        if os.path.isfile(file_path) \
                and os.path.getsize(file_path) > 0:
            try:
                logging.debug("There is a file and has size")
                ret_dict = load_json_file(file_path)
                logging.debug(f"Read from file: {ret_dict}")
            except (OSError, JSONDecodeError, TypeError) as err:
                logging.warning(f"Error accessing file {file_path}: {err}")
        else:
            logging.debug("Not a file or empty")

        # If cache is outdated or missing, fetch file (if we have a url and key)
        if os.path.isfile(file_path):
            last_write = datetime.fromtimestamp(os.path.getmtime(file_path))
        else:
            # No cache file yet: treat it as infinitely old so a fetch is attempted
            last_write = datetime.min
        threshold = datetime.now() - timedelta(seconds=self.lifetime)
        logging.debug(f"is cache outdated? {last_write} < {threshold} ?")
        if self.httpsig and last_write < threshold:
            res = None
            try:
                logging.debug(f"Fetching CM metadata set from {remote_url}")
                res = self.httpsig.get(remote_url)
                logging.debug(f"Fetched")
            except HttpError as err:
                logging.warning(
                    f"Error fetching remote config file {remote_url}: {err}")
            except HttpConnectError as err:
                logging.error(
                    f"Error fetching remote config file {remote_url}: {err}")

            # Parse the fetched file.
            if res and res.text:
                logging.debug(f"Fetched data is not empty: {res.text}")
                fetched_dict = None
                try:
                    logging.debug(f"Parsing data into json")
                    fetched_dict = json.loads(res.text)
                except (JSONDecodeError, TypeError) as err:
                    logging.warning(
                        f"Error decoding fetched file {file_path} from {remote_url}: {err}"
                    )

                #  If successful, overwrite
                if fetched_dict:
                    logging.debug(f"Successfully parsed")
                    try:
                        logging.debug(f"Overwriting cache in {file_path}")
                        with open(file_path, 'w') as f:
                            f.write(res.text)
                    except OSError as err:
                        logging.warning(
                            f"Error overwriting file {file_path}: {err}")
                    # Regardless of whether the overwrite succeeded, return
                    # the fetched data
                    logging.debug("Overwritten")
                    ret_dict = fetched_dict

        # Return config set contents, or error if empty
        if not ret_dict:
            raise CMHandlerError(
                "Requested config set not found or empty and could not be fetched remotely"
            )
        return ret_dict
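
The refresh logic in _get reduces to a time-based staleness check against the cached file's mtime. An equivalent standalone sketch of that check (the names are illustrative, not part of the module's API):

import os
from datetime import datetime, timedelta

def cache_is_stale(file_path, lifetime_seconds):
    # A missing cache file is treated as stale so a remote fetch is attempted.
    if not os.path.isfile(file_path):
        return True
    last_write = datetime.fromtimestamp(os.path.getmtime(file_path))
    return last_write < datetime.now() - timedelta(seconds=lifetime_seconds)
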
Example #13
    def test_unmarshall_marshall(self):
        datasets = load_json_file('data/testDatasets.json')
        d = Dataset()
        d.unmarshall(datasets[0])
        d.json_marshall()
Example #14
    def test_caster_works(self):
        mappings = load_json_file('data/attributeMaps.json')
        for mt in mappings:
            m = cast_from_dict(mt, AttributeMap)
            self.assertEqual(m.pairings[0].attributes,
                             mt['pairings'][0]['attributes'])
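
test_caster_works relies on a generic cast_from_dict helper. An illustrative sketch of the idea, assuming the target classes are dataclasses with type hints; the real helper may work differently, and nested lists such as AttributeMap.pairings would need extra handling:

from dataclasses import fields, is_dataclass

def cast_from_dict(data, cls):
    # Build a cls instance from a dict, recursing into nested dataclass
    # fields; lists of nested objects are left out of this sketch.
    kwargs = {}
    for f in fields(cls):
        value = data.get(f.name)
        if is_dataclass(f.type) and isinstance(value, dict):
            value = cast_from_dict(value, f.type)
        kwargs[f.name] = value
    return cls(**kwargs)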