def testConvertJsonDataFileBasic(self):
  """Basic checks on ConvertJsonDataFile output for the places schema.

  NOTE(review): renamed from testConvertJsonDataFile — a second method of
  the same name follows in this class and was silently shadowing this one,
  so it never ran.  The later variant adds citiesLived lat/long checks;
  this one keeps only the core searchwords-hash assertions.
  """
  schema = json.loads(test_util.GetPlacesSchemaString())
  infile = self._WriteTempPlacesJsonFile()
  outfile = os.path.join(self.dirname, 'places.enc_data')
  master_key = base64.b64decode(_MASTER_KEY)
  string_hasher = ecrypto.StringHash(
      ecrypto.GenerateStringHashKey(master_key, _TABLE_ID))
  load_lib.ConvertJsonDataFile(
      schema, master_key, _TABLE_ID, infile, outfile)
  # Validate the new data file against the rewritten schema.
  new_schema = json.loads(_PLACES_REWRITTEN_SCHEMA)
  load_lib._ValidateJsonDataFile(new_schema, outfile)

  # Only the first record is inspected; `with` guarantees the file is
  # closed even when an assertion below fails.
  with open(outfile, 'rt') as fout:
    data = json.loads(fout.readline())

  def _CheckSearchwordsHash(record, field, plaintext):
    """Assert record[SEARCHWORDS_PREFIX+field] is 'iv hash' for plaintext."""
    key = util.SEARCHWORDS_PREFIX + field
    self.assertTrue(key in record)
    (model_iv, model_hash) = record[key].split(' ')
    expected_key_hash = string_hasher.GetStringKeyHash(key, plaintext.lower())
    # Stored hash is the base64 of the first 8 bytes of
    # sha1(iv + keyed-hash-of-plaintext).
    expected_hash = base64.b64encode(
        hashlib.sha1(model_iv + expected_key_hash).digest()[:8])
    self.assertEqual(expected_hash, model_hash)

  self.assertEqual(data['kind'], 'person')
  _CheckSearchwordsHash(data, u'gender', u'Male')
  _CheckSearchwordsHash(data['citiesLived'][0], u'place', u'Seattle')
  # Numeric (pseudonym-exempt) field survives conversion unchanged.
  self.assertEqual(data['spouse']['spouseAge'], 23)
def testConvertJsonDataFile(self):
  """End-to-end check of ConvertJsonDataFile for the places schema.

  Converts a plaintext places json file, validates the result against the
  rewritten schema, then verifies searchwords hashes, a pass-through
  numeric field, and the encrypted lat/long values in citiesLived.
  """
  schema = json.loads(test_util.GetPlacesSchemaString())
  infile = self._WriteTempPlacesJsonFile()
  outfile = os.path.join(self.dirname, 'places.enc_data')
  master_key = base64.b64decode(_MASTER_KEY)
  string_hasher = ecrypto.StringHash(
      ecrypto.GenerateStringHashKey(master_key, _TABLE_ID))
  load_lib.ConvertJsonDataFile(
      schema, master_key, _TABLE_ID, infile, outfile)
  # Validate the new data file against the rewritten schema.
  new_schema = json.loads(_PLACES_REWRITTEN_SCHEMA)
  load_lib._ValidateJsonDataFile(new_schema, outfile)

  # Only the first record is inspected; `with` guarantees the file is
  # closed even when an assertion below fails (the original closed it
  # only after all asserts, leaking the handle on failure).
  with open(outfile, 'rt') as fout:
    data = json.loads(fout.readline())

  self.assertEqual(data['kind'], 'person')

  # gender searchwords field: 'iv hash', where hash is the base64 of the
  # first 8 bytes of sha1(iv + keyed-hash-of-lowercased-plaintext).
  self.assertTrue(util.SEARCHWORDS_PREFIX + u'gender' in data)
  (model_iv, model_hash) = data[
      util.SEARCHWORDS_PREFIX + u'gender'].split(' ')
  expected_model_key_hash = string_hasher.GetStringKeyHash(
      util.SEARCHWORDS_PREFIX + u'gender', u'Male'.lower())
  expected_model_hash = base64.b64encode(
      hashlib.sha1(model_iv + expected_model_key_hash).digest()[:8])
  self.assertEqual(expected_model_hash, model_hash)

  # Same check for the nested citiesLived[0].place searchwords field.
  self.assertTrue(
      util.SEARCHWORDS_PREFIX + u'place' in data['citiesLived'][0])
  (model_iv, model_hash) = data['citiesLived'][0][
      util.SEARCHWORDS_PREFIX + u'place'].split(' ')
  expected_model_key_hash = string_hasher.GetStringKeyHash(
      util.SEARCHWORDS_PREFIX + u'place', u'Seattle'.lower())
  expected_model_hash = base64.b64encode(
      hashlib.sha1(model_iv + expected_model_key_hash).digest()[:8])
  self.assertEqual(expected_model_hash, model_hash)

  # Numeric field survives conversion unchanged.
  self.assertEqual(data['spouse']['spouseAge'], 23)

  # Look for lat,long in citiesLived: at least one entry must carry
  # non-negative float coordinates.
  checked = []
  found_any = False
  for city in data['citiesLived']:
    checked.append(city)
    if city.get('lat', None) is None:
      continue
    found_any = True
    self.assertTrue(isinstance(city['lat'], float))
    self.assertTrue(isinstance(city['long'], float))
    self.assertTrue(city['lat'] >= 0.0)
    self.assertTrue(city['long'] >= 0.0)
  self.assertTrue(
      found_any,
      'found_any %s checked ( %s )' % (
          found_any, ' , '.join(map(str, checked))))
def testConvertComplexJsonDataFile(self):
  """End-to-end check of ConvertJsonDataFile for the nested jobs schema.

  Converts a plaintext jobs json file, validates the result against the
  rewritten schema, then verifies searchwords hashes plus the doubly
  nested repeated 'job' records and their 'manager' searchwords lists.
  """
  schema = json.loads(test_util.GetJobsSchemaString())
  infile = self._WriteTempJobsJsonFile()
  outfile = os.path.join(self.dirname, 'jobs.enc_data')
  master_key = base64.b64decode(_MASTER_KEY)
  string_hasher = ecrypto.StringHash(
      ecrypto.GenerateStringHashKey(master_key, _TABLE_ID))
  load_lib.ConvertJsonDataFile(
      schema, master_key, _TABLE_ID, infile, outfile)
  # Validate the new data file against the rewritten schema.
  new_schema = json.loads(_JOBS_REWRITTEN_SCHEMA)
  load_lib._ValidateJsonDataFile(new_schema, outfile)

  # Only the first record is inspected; `with` guarantees the file is
  # closed even when an assertion below fails (the original closed it
  # only after all asserts, leaking the handle on failure).
  with open(outfile, 'rt') as fout:
    data = json.loads(fout.readline())

  self.assertEqual(data['kind'], 'person')

  # gender searchwords field: 'iv hash', where hash is the base64 of the
  # first 8 bytes of sha1(iv + keyed-hash-of-lowercased-plaintext).
  self.assertTrue(util.SEARCHWORDS_PREFIX + u'gender' in data)
  (model_iv, model_hash) = data[
      util.SEARCHWORDS_PREFIX + u'gender'].split(' ')
  expected_model_key_hash = string_hasher.GetStringKeyHash(
      util.SEARCHWORDS_PREFIX + u'gender', u'Male'.lower())
  expected_model_hash = base64.b64encode(
      hashlib.sha1(model_iv + expected_model_key_hash).digest()[:8])
  self.assertEqual(expected_model_hash, model_hash)

  # Same check for the nested citiesLived[0].place searchwords field.
  self.assertTrue(
      util.SEARCHWORDS_PREFIX + u'place' in data['citiesLived'][0])
  (model_iv, model_hash) = data['citiesLived'][0][
      util.SEARCHWORDS_PREFIX + u'place'].split(' ')
  expected_model_key_hash = string_hasher.GetStringKeyHash(
      util.SEARCHWORDS_PREFIX + u'place', u'Seattle'.lower())
  expected_model_hash = base64.b64encode(
      hashlib.sha1(model_iv + expected_model_key_hash).digest()[:8])
  self.assertEqual(expected_model_hash, model_hash)

  # Repeated nested records: first city has ranked jobs, second has none.
  self.assertEqual(data['citiesLived'][0]['job'][0]['jobRank'], 1)
  self.assertEqual(data['citiesLived'][1]['job'], [])
  # The manager searchwords list carries 3 entries, each of which is a
  # space-separated 4-token value.
  self.assertEqual(
      len(data['citiesLived'][0]['job'][0][
          util.SEARCHWORDS_PREFIX + u'manager']), 3)
  self.assertEqual(
      len(data['citiesLived'][0]['job'][0][
          util.SEARCHWORDS_PREFIX + u'manager'][0].split(' ')), 4)
def testValidateComplexJsonDataFile(self):
  """The plaintext jobs data file should validate against the jobs schema."""
  jobs_schema = json.loads(test_util.GetJobsSchemaString())
  data_path = self._WriteTempJobsJsonFile()
  # Raises on validation failure; no return value to assert.
  load_lib._ValidateJsonDataFile(jobs_schema, data_path)