Example #1
    def test_json_multicollection_correct_winding(self):
        with open(os.path.join(TEST_DIR, 'test_geocollection.json'),
                  'r') as fd:
            json_geo = json.load(fd)
        # Reverse the outer ring of each geometry to break the winding.
        json_geo['geometries'][0]['coordinates'][0].reverse()
        json_geo['geometries'][1]['coordinates'][0].reverse()

        self.assertFalse(utils.HasCorrectGeoJsonWinding(json_geo))
        # InsureGeoJsonWinding corrects the winding in place.
        utils.InsureGeoJsonWinding(json_geo)
        self.assertTrue(utils.HasCorrectGeoJsonWinding(json_geo))
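
The winding rule these tests exercise is the GeoJSON (RFC 7946) convention: exterior rings wind counterclockwise and holes wind clockwise. A minimal sketch of such a check using the shoelace formula is shown below; ring_is_ccw and polygon_has_correct_winding are hypothetical names for illustration, not the actual utils implementation.

def ring_is_ccw(ring):
    # Shoelace variant: the sum is negative for a counterclockwise ring.
    # GeoJSON rings are closed (last point == first point), so zipping the
    # ring against itself shifted by one visits every edge.
    area = sum((x2 - x1) * (y2 + y1)
               for (x1, y1), (x2, y2) in zip(ring, ring[1:]))
    return area < 0

def polygon_has_correct_winding(coordinates):
    # The first ring is the exterior (must be CCW); the rest are holes (CW).
    if not ring_is_ccw(coordinates[0]):
        return False
    return all(not ring_is_ccw(hole) for hole in coordinates[1:])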
Example #2
    def test_polygon_correct_geojson_winding(self):
        poly_with_hole = {
            'type': 'Polygon',
            'coordinates': [[[-97.23, 38.86], [-97.31, 38.76], [-97.16, 38.73],
                             [-97.16, 38.86], [-97.23, 38.86]],
                            [[-97.21, 38.82], [-97.18, 38.81], [-97.19, 38.78],
                             [-97.22, 38.78], [-97.21, 38.82]]]
        }
        self.assertTrue(utils.HasCorrectGeoJsonWinding(poly_with_hole))

        # Reversing the exterior ring alone breaks the winding.
        poly_with_hole['coordinates'][0] = list(
            reversed(poly_with_hole['coordinates'][0]))
        self.assertFalse(utils.HasCorrectGeoJsonWinding(poly_with_hole))

        # Reversing the hole as well leaves both rings wound the wrong way.
        poly_with_hole['coordinates'][1] = list(
            reversed(poly_with_hole['coordinates'][1]))
        self.assertFalse(utils.HasCorrectGeoJsonWinding(poly_with_hole))

        # Restoring the exterior ring still leaves the hole reversed.
        poly_with_hole['coordinates'][0] = list(
            reversed(poly_with_hole['coordinates'][0]))
        self.assertFalse(utils.HasCorrectGeoJsonWinding(poly_with_hole))
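
Correcting the winding then amounts to reversing any ring that fails the check, which matches the in-place behavior the first test relies on. A minimal sketch, reusing the hypothetical ring_is_ccw helper from the sketch above:

def ensure_polygon_winding(coordinates):
    # Reverse the exterior ring if it is not CCW, and reverse each hole
    # that is not CW, mutating the coordinate lists in place.
    if not ring_is_ccw(coordinates[0]):
        coordinates[0].reverse()
    for hole in coordinates[1:]:
        if ring_is_ccw(hole):
            hole.reverse()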
    def test_WINNF_FT_S_FAD_1(self, config_filename):
        """This test verifies that a SAS UUT can successfully respond to a full
        activity dump request from a SAS Test Harness.

        The SAS UUT approves the request and responds with correct content and
        format for both the dump message and the dump files.
        """
        # load config file
        config = loadConfig(config_filename)
        # Very light checking of the config file.
        self.assertEqual(len(config['registrationRequests']),
                         len(config['grantRequests']))
        # check that the config file contains consistent PAL&PPA data
        for index, grant in enumerate(config['grantRequests']):
            grant_frequency_range = grant['operationParam'][
                'operationFrequencyRange']
            for ppa in config['ppaRecords']:
                if index in ppa['ppaClusterList']:
                    frequency_ranges_of_pals = [{
                        'frequencyRange': {
                            'lowFrequency': pal['channelAssignment']
                            ['primaryAssignment']['lowFrequency'],
                            'highFrequency': pal['channelAssignment']
                            ['primaryAssignment']['highFrequency']
                        }
                    } for pal in config['palRecords']
                      if pal['palId'] in ppa['ppaRecord']['ppaInfo']['palId']]
                    self.assertLessEqual(
                        1, len(frequency_ranges_of_pals),
                        'Empty list of frequency ranges in the PAL config')
                    low_freq = min([
                        freq_range['frequencyRange']['lowFrequency']
                        for freq_range in frequency_ranges_of_pals
                    ])
                    high_freq = max([
                        freq_range['frequencyRange']['highFrequency']
                        for freq_range in frequency_ranges_of_pals
                    ])
                    self.assertChannelsContainFrequencyRange(
                        frequency_ranges_of_pals, {
                            'lowFrequency': low_freq,
                            'highFrequency': high_freq
                        })
                    # check that the grant in the config file does not mix PAL and GAA channels
                    if low_freq <= grant_frequency_range[
                            'lowFrequency'] <= high_freq:
                        self.assertLessEqual(
                            grant_frequency_range['highFrequency'], high_freq,
                            'Incorrect high frequency of the grant with index {0}: makes it a GAA & PAL mixed grant'
                            .format(index))
                    if low_freq <= grant_frequency_range[
                            'highFrequency'] <= high_freq:
                        self.assertGreaterEqual(
                            grant_frequency_range['lowFrequency'], low_freq,
                            'Incorrect low frequency of the grant with index {0}: makes it a GAA & PAL mixed grant'
                            .format(index))
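                    # Worked example with hypothetical numbers: if the PAL
                    # channels span 3550-3570 MHz, a grant at 3560-3580 MHz
                    # starts inside the PAL range but ends above it, so it
                    # would mix PAL and GAA spectrum and fail the check above.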
        for index, device in enumerate(config['registrationRequests']):
            # check the azimuth in the CBSD config: if the beamwidth is not 0 or 360
            # and the azimuth is not provided, CBSD registration may be rejected
            reg_conditional_device_data_list = [
                reg for reg in
                config['conditionalRegistrationData']['registrationData']
                if reg['fccId'] == device['fccId'] and
                reg['cbsdSerialNumber'] == device['cbsdSerialNumber']
            ]
            if len(reg_conditional_device_data_list) == 1:
                reg_conditional_installation_param = reg_conditional_device_data_list[
                    0]['installationParam']
            elif len(reg_conditional_device_data_list) > 1:
                self.fail(
                    'Invalid conditional registration data: multiple conditional registration configs for the CBSD with index {0}'
                    .format(index))
            else:
                reg_conditional_installation_param = {}
            def get_antenna_param(param_name):
                # Prefer the value in the registration request itself, fall
                # back to the conditional registration data, else None.
                if ('installationParam' in device
                        and param_name in device['installationParam']):
                    return device['installationParam'][param_name]
                return reg_conditional_installation_param.get(param_name)

            registration_antenna_azimuth = get_antenna_param('antennaAzimuth')
            registration_antenna_beamwidth = get_antenna_param(
                'antennaBeamwidth')
            if (registration_antenna_beamwidth is not None
                    and registration_antenna_beamwidth not in [0, 360]
                    and registration_antenna_azimuth is None):
                self.fail(
                    'Invalid config: missing azimuth value for CBSD config with index {0}'
                    .format(index))
            # inject FCC ID and User ID of CBSD
            self._sas_admin.InjectFccId({
                'fccId': device['fccId'],
                'fccMaxEirp': 47
            })
            self._sas_admin.InjectUserId({'userId': device['userId']})

        # Pre-load conditional registration data for N3 CBSDs.
        self._sas_admin.PreloadRegistrationData(
            config['conditionalRegistrationData'])

        # Register N1 CBSDs.
        request = {'registrationRequest': config['registrationRequests']}
        responses = self._sas.Registration(request)['registrationResponse']

        # Check registration responses and collect the CBSD IDs
        self.assertEqual(len(responses), len(config['registrationRequests']))
        cbsd_ids = []
        for response in responses:
            self.assertEqual(response['response']['responseCode'], 0)
            cbsd_ids.append(response['cbsdId'])
        # inject PALs and N2 PPAs
        ppa_ids = []
        for pal in config['palRecords']:
            try:
                self._sas_admin.InjectPalDatabaseRecord(pal)
            except Exception:
                logging.error(common_strings.CONFIG_ERROR_SUSPECTED)
                raise
        for ppa in config['ppaRecords']:
            # fill the PPA cbsdReferenceIds with values according to admin testing API spec
            ppa['ppaRecord']['ppaInfo']['cbsdReferenceId'] = []
            for index in ppa['ppaClusterList']:
                ppa['ppaRecord']['ppaInfo']['cbsdReferenceId'].append(
                    cbsd_ids[index])
            try:
                ppa_ids.append(
                    self._sas_admin.InjectZoneData(
                        {'record': ppa['ppaRecord']}))
            except Exception:
                logging.error(common_strings.CONFIG_ERROR_SUSPECTED)
                raise
            # re-fill the PPA cbsdReferenceIds with the values expected in the dump according to SAS-SAS TS
            ppa['ppaRecord']['ppaInfo']['cbsdReferenceId'] = []
            for index in ppa['ppaClusterList']:
                cbsd = config['registrationRequests'][index]
                ppa['ppaRecord']['ppaInfo']['cbsdReferenceId'].append(
                    generateCbsdReferenceId(cbsd['fccId'],
                                            cbsd['cbsdSerialNumber']))
        grants = config['grantRequests']
        for index, response in enumerate(responses):
            self.assertEqual(response['response']['responseCode'], 0)
            grants[index]['cbsdId'] = response['cbsdId']
        # send grant request with N1 grants
        del responses
        grant_responses = self._sas.Grant(
            {'grantRequest': grants})['grantResponse']
        # check grant response
        self.assertEqual(len(grant_responses), len(config['grantRequests']))
        for grant_response in grant_responses:
            self.assertEqual(grant_response['response']['responseCode'], 0)
        # inject N3 Esc sensor
        for esc_sensor in config['escSensorRecords']:
            try:
                self._sas_admin.InjectEscSensorDataRecord(
                    {'record': esc_sensor})
            except Exception:
                logging.error(common_strings.CONFIG_ERROR_SUSPECTED)
                raise
        # step 7
        # Notify the SAS UUT about the SAS Test Harness
        for sas_th in config['sasTestHarnessConfigs']:
            certificate_hash = getCertificateFingerprint(sas_th['serverCert'])
            url = 'https://' + sas_th['hostName'] + ':' + str(sas_th['port'])
            self._sas_admin.InjectPeerSas({
                'certificateHash': certificate_hash,
                'url': url
            })
        sas_th_config = config['sasTestHarnessConfigs'][0]
        response = self.TriggerFullActivityDumpAndWaitUntilComplete(
            sas_th_config['serverCert'], sas_th_config['serverKey'])
        # verify that all the SASes get the same response:
        # check dump message format
        self.assertContainsRequiredFields("FullActivityDump.schema.json",
                                          response)
        # an array for each record type
        cbsd_dump_data = []
        ppa_dump_data = []
        esc_sensor_dump_data = []
        # step 8 and checks
        # download the dump files and fill the corresponding arrays
        downloaded_files = {}
        for dump_file in response['files']:
            self.assertContainsRequiredFields("ActivityDumpFile.schema.json",
                                              dump_file)
            downloaded_file = None
            if dump_file['recordType'] != 'coordination':
                downloaded_file = self._sas.DownloadFile(
                    dump_file['url'], sas_th_config['serverCert'],
                    sas_th_config['serverKey'])
                # The downloaded_file is being modified in the assertions below,
                # and hence we need a deep copy to verify that dump files are the
                # same when requested by different SASs.
                downloaded_files[dump_file['url']] = copy.deepcopy(
                    downloaded_file)
            if dump_file['recordType'] == 'cbsd':
                cbsd_dump_data.extend(downloaded_file['recordData'])
            elif dump_file['recordType'] == 'esc_sensor':
                esc_sensor_dump_data.extend(downloaded_file['recordData'])
            elif dump_file['recordType'] == 'zone':
                ppa_dump_data.extend(downloaded_file['recordData'])
            else:
                self.assertEqual('coordination', dump_file['recordType'])
        # verify that the number of records equals the number of injected ones
        self.assertEqual(len(config['registrationRequests']),
                         len(cbsd_dump_data))
        self.assertEqual(len(config['ppaRecords']), len(ppa_dump_data))
        self.assertEqual(len(config['escSensorRecords']),
                         len(esc_sensor_dump_data))
        # verify the schema of each record and the first three parts of the PPA record id
        for ppa_record in ppa_dump_data:
            self.assertContainsRequiredFields("ZoneData.schema.json",
                                              ppa_record)
            self.assertEqual(ppa_record['id'].split("/")[0], 'zone')
            self.assertEqual(ppa_record['id'].split("/")[1], 'ppa')
            self.assertEqual(ppa_record['id'].split("/")[2],
                             self._sas._sas_admin_id)
            del ppa_record['id']
            # remove creator from value check
            del ppa_record['creator']
            # check the GeoJSON winding of the PPA record
            self.assertTrue(
                utils.HasCorrectGeoJsonWinding(
                    ppa_record['zone']['features'][0]['geometry']))
            # verify that the injected PPAs exist in the dump files
            exist_in_dump = False
            for ppa_conf in config['ppaRecords']:
                ppa = ppa_conf['ppaRecord']
                if 'id' in ppa:
                    del ppa['id']
                # remove creator from value check
                if 'creator' in ppa:
                    del ppa['creator']
                exist_in_dump = exist_in_dump or areTwoPpasEqual(
                    ppa_record, ppa)
                if exist_in_dump:
                    break
            self.assertTrue(exist_in_dump)
        # verify the schema of each record and the first two parts of the ESC sensor record id
        for esc_record in esc_sensor_dump_data:
            self.assertContainsRequiredFields("EscSensorRecord.schema.json",
                                              esc_record)
            self.assertEqual(esc_record['id'].split("/")[0], 'esc_sensor')
            self.assertEqual(esc_record['id'].split("/")[1],
                             self._sas._sas_admin_id)
            del esc_record['id']
            # verify that all the injected Esc sensors exist in the dump files
            exist_in_dump = False
            for esc in config['escSensorRecords']:
                if 'id' in esc:
                    del esc['id']
                exist_in_dump = exist_in_dump or compareDictWithUnorderedLists(
                    esc_record, esc)
                if exist_in_dump:
                    break
            self.assertTrue(exist_in_dump)

        # verify that retrieved cbsd dump files have correct schema
        for cbsd_record in cbsd_dump_data:
            self.assertContainsRequiredFields("CbsdData.schema.json",
                                              cbsd_record)
            self.assertFalse("cbsdInfo" in cbsd_record)
        # verify all the previous activities on CBSDs and Grants exist in the dump files
        self.assertCbsdRecord(config['registrationRequests'], grants,
                              grant_responses, cbsd_dump_data,
                              config['conditionalRegistrationData'])
        # step 10: check that all SAS Test Harnesses retrieve all of the data in the Full Activity Dump from the SAS UUT
        for sas_th in config['sasTestHarnessConfigs'][1:]:
            dump_message = self._sas.GetFullActivityDump(
                sas_th['serverCert'], sas_th['serverKey'])
            # check that the dump message is the same as the message retrieved by the first SAS TH
            self.assertTrue(
                compareDictWithUnorderedLists(response, dump_message))
            # check that the dump files are the same as the files retrieved by the first SAS TH
            for dump_file in dump_message['files']:
                if dump_file['recordType'] != 'coordination':
                    downloaded_file = self._sas.DownloadFile(
                        dump_file['url'], sas_th['serverCert'],
                        sas_th['serverKey'])
                    self.assertDictEqual(downloaded_files[dump_file['url']],
                                         downloaded_file)
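
The dump comparisons above lean on compareDictWithUnorderedLists, whose semantics the test implies: two structures match if dict keys and values match recursively, with lists compared irrespective of element order. Below is a minimal sketch under that assumption; dicts_equal_unordered is a hypothetical name, not the harness implementation.

def dicts_equal_unordered(a, b):
    if isinstance(a, dict) and isinstance(b, dict):
        return (set(a) == set(b) and
                all(dicts_equal_unordered(a[k], b[k]) for k in a))
    if isinstance(a, list) and isinstance(b, list):
        if len(a) != len(b):
            return False
        # Greedily pair each element of a with an unused matching element
        # of b; greedy pairing is sound here because recursive equality is
        # an equivalence relation.
        remaining = list(b)
        for item in a:
            for i, candidate in enumerate(remaining):
                if dicts_equal_unordered(item, candidate):
                    del remaining[i]
                    break
            else:
                return False
        return True
    return a == b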