def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Find Sites within the query's radius whose capacity exceeds
    CAPACITY_THRESHOLD.

    Resolves the GEO query first, then batch-fetches each matching
    site's capacity score and hash through a pipeline to avoid one
    round trip per site.
    """
    # START Challenge #5
    # Sites matching the GEO query.
    site_ids = self.redis.georadius(  # type: ignore
        self.key_schema.site_geo_key(), query.coordinate.lng,
        query.coordinate.lat, query.radius, query.radius_unit.value)
    # END Challenge #5

    p = self.redis.pipeline(transaction=False)

    # START Challenge #5
    # Queue one ZSCORE per site on the pipeline; replies come back in
    # submission order, so zip pairs each capacity with its site ID.
    capacity_key = self.key_schema.capacity_ranking_key()
    for site_id in site_ids:
        p.zscore(capacity_key, site_id)
    scores: Dict[str, float] = dict(zip(site_ids, p.execute()))
    # END Challenge #5

    for site_id in site_ids:
        # ZSCORE returns None for IDs missing from the ranking set,
        # so the truthiness check also guards against missing scores.
        if scores[site_id] and scores[site_id] > CAPACITY_THRESHOLD:
            p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()

    return {FlatSiteSchema().load(site) for site in site_hashes}
def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Return the Sites inside the GEO query's radius whose capacity
    is above CAPACITY_THRESHOLD.
    """
    # START Challenge #5
    # Your task: Get the sites matching the GEO query.
    matching_ids = self.redis.georadius(
        self.key_schema.site_geo_key(),
        query.coordinate.lng,
        query.coordinate.lat,
        query.radius,
        unit=query.radius_unit.value,
    )
    # END Challenge #5

    pipeline = self.redis.pipeline(transaction=False)

    # START Challenge #5
    # Batch one ZSCORE per matching site; pipeline replies arrive in
    # submission order, so they pair up with the IDs that queued them.
    ranking_key = self.key_schema.capacity_ranking_key()
    for matched_id in matching_ids:
        pipeline.zscore(ranking_key, matched_id)
    scores = {
        matched_id: capacity
        for matched_id, capacity in zip(matching_ids, pipeline.execute())
    }
    # END Challenge #5

    # Queue a hash fetch only for sites above the capacity threshold;
    # a None score means the site has no entry in the ranking set.
    for matched_id in matching_ids:
        capacity = scores[matched_id]
        if capacity and capacity > CAPACITY_THRESHOLD:
            pipeline.hgetall(self.key_schema.site_hash_key(matched_id))
    fetched_hashes = pipeline.execute()

    return {FlatSiteSchema().load(site_hash) for site_hash in fetched_hashes}
def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Find Sites within the query's radius whose capacity exceeds
    CAPACITY_THRESHOLD, batching Redis reads through a pipeline.
    """
    # START Challenge #5
    site_ids = self.redis.georadius(  # type: ignore
        self.key_schema.site_geo_key(), query.coordinate.lng,
        query.coordinate.lat, query.radius, query.radius_unit.value)
    # END Challenge #5

    p = self.redis.pipeline(transaction=False)

    # START Challenge #5
    capacity_ranking_key = self.key_schema.capacity_ranking_key()
    for site_id in site_ids:
        p.zscore(capacity_ranking_key, site_id)
    # Pipeline replies come back in submission order, so zip pairs
    # each capacity with the site ID that requested it (replaces the
    # original manual index bookkeeping).
    scores: Dict[str, float] = dict(zip(site_ids, p.execute()))
    # END Challenge #5

    for site_id in site_ids:
        # ZSCORE yields None for sites absent from the ranking set.
        if scores[site_id] and scores[site_id] > CAPACITY_THRESHOLD:
            p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()

    return {FlatSiteSchema().load(site) for site in site_hashes}
def insert(self, site: Site, **kwargs):
    """Insert a Site into Redis.

    Writes the flattened site hash and records the ID in the
    site-IDs set, using an injected pipeline when one is supplied.
    """
    # Callers may pass a pipeline to batch this write with others.
    conn = kwargs.get('pipeline', self.redis)
    conn.hset(
        self.key_schema.site_hash_key(site.id),
        mapping=FlatSiteSchema().dump(site))
    conn.sadd(self.key_schema.site_ids_key(), site.id)
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis.

    Reads the set of known site IDs, then fetches each site's hash
    through a pipeline so all lookups travel in one round trip.
    """
    # START Challenge #1
    site_ids = self.redis.smembers(self.key_schema.site_ids_key())
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()
    # END Challenge #1
    return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes}
def _find_by_geo(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Find all Sites within the GEO query's radius."""
    site_ids = self.redis.georadius(  # type: ignore
        self.key_schema.site_geo_key(), query.coordinate.lng,
        query.coordinate.lat, query.radius, query.radius_unit.value)
    # Batch the per-site hash fetches on one pipeline -- a single
    # round trip instead of one HGETALL request per site.
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        p.hgetall(self.key_schema.site_hash_key(site_id))
    sites = p.execute()
    return {FlatSiteSchema().load(site) for site in sites}
def find_by_id(self, site_id: int, **kwargs) -> Site:
    """Find a Site by ID in Redis.

    Raises:
        SiteNotFound: if no hash exists for the given ID.
    """
    site_hash = self.redis.hgetall(self.key_schema.site_hash_key(site_id))
    # HGETALL returns an empty mapping when the key does not exist.
    if not site_hash:
        raise SiteNotFound()
    return FlatSiteSchema().load(site_hash)
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites.

    Reads the site IDs from the GEO sorted set, then fetches every
    site hash in one pipelined round trip.
    """
    site_ids = self.redis.zrange(self.key_schema.site_geo_key(), 0, -1)
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        p.hgetall(self.key_schema.site_hash_key(site_id))
    h_data = p.execute()
    # HGETALL yields an empty dict (never None) for a missing key, so
    # the original `is not None` filter was a no-op; filter on
    # truthiness to actually skip sites whose hash has disappeared.
    return {
        FlatSiteSchema().load(site_hash)
        for site_hash in h_data
        if site_hash
    }
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis.

    Reads the site-IDs set, then fetches the hash stored for each
    member, using an injected pipeline when one is supplied.
    """
    conn = kwargs.get('pipeline', self.redis)
    members = conn.smembers(self.key_schema.site_ids_key())
    hashes = [
        conn.hgetall(self.key_schema.site_hash_key(member))
        for member in members
    ]
    return {FlatSiteSchema().load(raw) for raw in hashes}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis.

    Reads the site-IDs set and each site's hash through the same
    client, honoring an injected pipeline when one is supplied.
    """
    client = kwargs.get('pipeline', self.redis)
    site_ids = client.smembers(self.key_schema.site_ids_key())
    # Use the same client for the hash reads that produced the IDs;
    # the original mixed `client` and `self.redis`, which defeats the
    # point of accepting an injected pipeline.
    site_hashes = [
        client.hgetall(self.key_schema.site_hash_key(site_id))
        for site_id in site_ids
    ]
    return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites.

    Reads the site IDs from the GEO sorted set, then fetches all of
    their hashes in a single pipelined round trip.
    """
    site_ids = self.redis.zrange(self.key_schema.site_geo_key(), 0, -1)
    # Queue every HGETALL on one pipeline: one round trip instead of
    # one request per site as in the original sequential loop.
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        p.hgetall(self.key_schema.site_hash_key(site_id))
    return {FlatSiteSchema().load(site_hash) for site_hash in p.execute()}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis.

    Walks the site-IDs set with SSCAN and fetches each site's hash.
    """
    # START Challenge #1
    # SSCAN iterates the set incrementally, avoiding one huge
    # SMEMBERS reply for large deployments.
    site_hashes = [
        self.redis.hgetall(self.key_schema.site_hash_key(site_id))
        for site_id in self.redis.sscan_iter(self.key_schema.site_ids_key())
    ]
    # END Challenge #1
    return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes}
def insert(self, site: Site, **kwargs):
    """Insert a Site into Redis: its flat hash plus a GEO index entry.

    Raises:
        ValueError: if the site has no coordinate. Validation happens
            BEFORE any write, so a rejected site leaves no partial
            state (the original wrote the hash first and then raised,
            leaving a site in the hash store but not the GEO index).
    """
    if not site.coordinate:
        raise ValueError("Site coordinates are required for Geo insert")
    client = kwargs.get('pipeline', self.redis)
    client.hset(  # type: ignore
        self.key_schema.site_hash_key(site.id),
        mapping=FlatSiteSchema().dump(site))
    client.geoadd(  # type: ignore
        self.key_schema.site_geo_key(),
        site.coordinate.lng, site.coordinate.lat, site.id)
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis."""
    site_ids = self.redis.smembers(self.key_schema.site_ids_key())
    # These are raw Redis hashes (dicts), not Site instances -- the
    # original mis-annotated this list as List[Site].
    site_hashes: List[Dict[str, str]] = [
        self.redis.hgetall(self.key_schema.site_hash_key(site_id))
        for site_id in site_ids
    ]
    return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites.

    Reads all site IDs from the GEO sorted set and fetches their
    hashes in one pipelined batch.
    """
    member_ids = self.redis.zrange(self.key_schema.site_geo_key(), 0, -1)
    batch = self.redis.pipeline(transaction=False)
    for member_id in member_ids:
        batch.hgetall(self.key_schema.site_hash_key(member_id))
    return {FlatSiteSchema().load(raw) for raw in batch.execute()}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites.

    Reads site IDs from the GEO sorted set and fetches every hash in
    one pipelined round trip.
    """
    site_ids = self.redis.zrange(self.key_schema.site_geo_key(), 0, -1)
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()
    # HGETALL returns {} (never None) for a missing key, so the
    # original `is not None` filter was a no-op; filter on truthiness
    # to actually skip sites whose hash no longer exists.
    return {
        FlatSiteSchema().load(site_hash)
        for site_hash in site_hashes
        if site_hash
    }
def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Find Sites within the query's radius whose capacity exceeds
    CAPACITY_THRESHOLD.

    Issues the GEO query and the full capacity ranking together in
    one pipeline round trip, then fetches the qualifying site hashes
    in a second. (The leftover challenge scaffolding -- placeholder
    comments and dead initializers -- has been removed.)
    """
    p = self.redis.pipeline(transaction=False)
    p.georadius(self.key_schema.site_geo_key(),
                query.coordinate.lng, query.coordinate.lat,
                query.radius, query.radius_unit.value)
    p.zrange(self.key_schema.capacity_ranking_key(), 0, -1,
             withscores=True)
    site_ids, capacity_pairs = p.execute()

    # site ID -> capacity, for every site in the ranking set.
    scores: Dict[str, float] = dict(capacity_pairs)

    for site_id in site_ids:
        # .get() guards against a site present in the GEO index but
        # missing from the capacity ranking; direct indexing (as in
        # the original) would raise KeyError there.
        capacity = scores.get(site_id)
        if capacity and capacity > CAPACITY_THRESHOLD:
            p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()

    return {FlatSiteSchema().load(site) for site in site_hashes}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites.

    Reads site IDs from the GEO sorted set and fetches every site
    hash in one pipelined round trip. The commented-out per-site
    HGETALL code from an earlier revision has been deleted.
    """
    site_ids = self.redis.zrange(self.key_schema.site_geo_key(), 0, -1)
    pipeline = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        # Queue the hash read; nothing is sent until execute().
        pipeline.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = pipeline.execute()
    return {FlatSiteSchema().load(s_hash) for s_hash in site_hashes}
def find_all(self, **kwargs) -> Set[Site]:
    """Find all Sites in Redis.

    Reads every member of the site-IDs set, then fetches the hash
    stored under each member's hash key.
    """
    # START Challenge #1
    ids_key = self.key_schema.site_ids_key()
    site_hashes = [
        self.redis.hgetall(self.key_schema.site_hash_key(site_id))
        for site_id in self.redis.smembers(ids_key)
    ]
    # END Challenge #1
    return {FlatSiteSchema().load(site_hash) for site_hash in site_hashes}
def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Return Sites inside the GEO radius whose capacity is above
    CAPACITY_THRESHOLD.

    Accepts an injected pipeline (kwarg 'pipeline') for the GEO query.
    """
    client = kwargs.get('pipeline', self.redis)
    matched_ids = client.georadius(
        self.key_schema.site_geo_key(),
        query.coordinate.lng,
        query.coordinate.lat,
        query.radius,
        query.radius_unit.value)

    pipeline = self.redis.pipeline(transaction=False)
    ranking_key = self.key_schema.capacity_ranking_key()
    for matched_id in matched_ids:
        pipeline.zscore(ranking_key, matched_id)
    # Replies arrive in submission order, so zip pairs each score
    # with its site ID; ZSCORE gives None for a missing member.
    scores = dict(zip(matched_ids, pipeline.execute()))

    for matched_id in matched_ids:
        capacity = scores[matched_id]
        if capacity and capacity > CAPACITY_THRESHOLD:
            pipeline.hgetall(self.key_schema.site_hash_key(matched_id))
    raw_hashes = pipeline.execute()

    return {FlatSiteSchema().load(raw) for raw in raw_hashes}
def _find_by_geo_with_capacity(self, query: GeoQuery, **kwargs) -> Set[Site]:
    """Find Sites within the query's radius whose capacity exceeds
    CAPACITY_THRESHOLD.
    """
    site_ids = self.redis.georadius(  # type: ignore
        self.key_schema.site_geo_key(), query.coordinate.lng,
        query.coordinate.lat, query.radius, query.radius_unit.value)
    # site ID -> capacity for every site in the ranking set.
    scores = dict(
        self.redis.zrange(self.key_schema.capacity_ranking_key(), 0, -1,
                          withscores=True))
    p = self.redis.pipeline(transaction=False)
    for site_id in site_ids:
        # .get() avoids the KeyError the original direct indexing
        # could raise for a site present in the GEO index but absent
        # from the capacity ranking.
        capacity = scores.get(site_id)
        if capacity and capacity > CAPACITY_THRESHOLD:
            p.hgetall(self.key_schema.site_hash_key(site_id))
    site_hashes = p.execute()
    return {FlatSiteSchema().load(site) for site in site_hashes}
def test_site_schema_loads_coordinate():
    """FlatSiteSchema.load should fold flat lat/lng fields into a
    Coordinate on the resulting Site."""
    expected = Site(id=1,
                    capacity=1,
                    panels=1,
                    address="Somewhere",
                    city="Portland",
                    state="OR",
                    postal_code="97201",
                    coordinate=Coordinate(lat=1.0, lng=1.1))
    flat_json = {
        "id": 1,
        "capacity": 1,
        "panels": 1,
        "address": "Somewhere",
        "city": "Portland",
        "state": "OR",
        "postal_code": "97201",
        "lat": 1,
        "lng": 1.1,
    }
    assert FlatSiteSchema().load(flat_json) == expected
def load(filename, delete_keys):
    """Load the specified JSON file into Redis"""
    # Connection settings come from the Flask app config.
    conf = current_app.config
    hostname = conf['REDIS_HOST']
    port = conf['REDIS_PORT']
    key_prefix = conf['REDIS_KEY_PREFIX']
    key_schema = KeySchema(key_prefix)
    client = get_connection(hostname=hostname, port=port)
    site_dao = SiteDaoRedis(client, key_schema)
    site_geo_dao = SiteGeoDaoRedis(client, key_schema)

    # Optionally wipe every key under this app's prefix first so the
    # load starts from a clean slate.
    if delete_keys:
        for key in client.scan_iter(f"{key_prefix}:*"):
            client.delete(key)

    # Deserialize every site record from the JSON file.
    with open(filename, 'r') as f:
        sites = [FlatSiteSchema().load(d) for d in json.loads(f.read())]

    # Insert each site into both DAOs, queuing all writes on one
    # non-transactional pipeline and sending them in a single batch.
    sites_bar = Bar('Loading sites', max=len(sites))
    p = client.pipeline(transaction=False)
    for site in sites:
        sites_bar.next()
        site_dao.insert(site, pipeline=p)
        site_geo_dao.insert(site, pipeline=p)
    p.execute()
    print()

    # Generate sample metrics readings for the loaded sites; the
    # generator queues its own commands onto the fresh pipeline,
    # which is flushed once at the end.
    sample_generator = SampleDataGenerator(client, sites, 1, key_schema)
    readings_bar = Bar('Generating metrics data', max=sample_generator.size)
    p = client.pipeline(transaction=False)
    for _ in sample_generator.generate(p):
        readings_bar.next()
    print("\nFinishing up...")
    p.execute()
    print("\nData load complete!")