def run(self, requester_location, desired_drivers, max_depth=5, requester_preferences=None):
    """Find up to ``desired_drivers`` drivers near ``requester_location``.

    Expands a rectangular search area around the requester in up to
    ``max_depth`` steps, scoring every driver found and excluding the
    previously searched rectangle on each pass.

    :param requester_location: geo point accepted by Helpers.validate_geo_point
    :param desired_drivers: stop expanding once this many drivers scored above cutoff
    :param requester_preferences: opaque preferences forwarded to the scorer
    :return: list of drivers ordered by score, best first
    """
    requester_location = Helpers.validate_geo_point(requester_location)
    result = []
    last_bounding_box = (None, None)
    exp_factor = 1
    while len(result) < desired_drivers and exp_factor <= max_depth:
        search_radius_miles = pow(DriverFinder._lat_inc, exp_factor)
        # ~69 miles per degree of latitude.
        lat_delta = search_radius_miles / 69.0
        top = requester_location["lat"] + lat_delta
        bottom = requester_location["lat"] - lat_delta
        # Miles per degree of longitude shrink with latitude; math.cos needs
        # radians (the previous code passed degrees and also used this
        # miles-per-degree value directly as a longitude coordinate,
        # discarding the requester's longitude entirely).
        miles_per_lng_degree = 3960 * 2 * math.pi / 360 * math.cos(math.radians(requester_location["lat"]))
        lng_delta = search_radius_miles / miles_per_lng_degree
        # assumes validated geo points carry a "lng" key alongside "lat" -- TODO confirm
        top_left = Helpers.validate_geo_point((top, requester_location["lng"] - lng_delta))
        bottom_right = Helpers.validate_geo_point((bottom, requester_location["lng"] + lng_delta))
        drivers_in_area = self.driver_bus.list_in_rectangle(
            top_left,
            bottom_right,
            True,
            last_bounding_box[0],
            last_bounding_box[1]  # already-searched area to exclude
        )
        # TODO parallel processing of driver score calculation
        for driver in drivers_in_area:
            score = self.calculate_driver_score(driver, requester_location, requester_preferences)
            # NOTE(review): __init__ assigns self.score_cutoff but this reads
            # the class attribute _score_cutoff -- confirm which is canonical.
            if score > DriverFinder._score_cutoff:
                result.append((driver, score))
        last_bounding_box = (top_left, bottom_right)
        exp_factor += 1

    def get_score(item):
        return item[1]

    # return drivers ordered by score, best first
    return [driver[0] for driver in sorted(result, key=get_score, reverse=True)]
def run_main():
    """Create the configured database and one table per DAO module.

    Loads the API and datasource configuration, creates the database
    through the backend-specific DBBaseDao, then scans dao/<backend>/
    and calls create_table() on every DAO class found there.
    """
    cfg = Helpers.load_config()
    db_cfg = Helpers.load_ds_config()
    ds_provider = DSProvider.get()
    datasource = ds_provider.get_data_source(cfg["api"]["database"], cfg["env"])

    # load backend-specific database class and create the database itself
    db_file = "dao.%s.base.DBBaseDao" % cfg["api"]["database"]
    db_loader = Helpers.get_class(db_file)(ds_provider, datasource)
    print("Creating database")
    db_loader.create_db(**db_cfg)

    # scan dao files and create one table per DAO class
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    # Python 2: decode so os.listdir yields unicode file names
    if not isinstance(cur_dir, unicode):
        cur_dir = cur_dir.decode("utf-8")
    _dao_path = os.path.join(cur_dir, "dao", cfg["api"]["database"])
    _ignore_in_load = ["__init__.py", "base.py", "init_db.py"]
    for file_in_dir in os.listdir(_dao_path):
        if file_in_dir.endswith(".py") and file_in_dir not in _ignore_in_load:
            module_name = file_in_dir[:-3]  # strip ".py"
            dao_class_name = Helpers.file_name_to_class_name(module_name) + "Dao"
            print("Creating table for %s" % dao_class_name)
            clazz = Helpers.get_class("dao.%s.%s.%s" % (cfg["api"]["database"], module_name, dao_class_name))
            dao_obj = clazz(ds_provider, datasource)
            dao_obj.create_table(**db_cfg)
    print("Database created !")
def cancel_active_requests(self, requester_id):
    """Mark every active request of ``requester_id`` as canceled.

    When a request already has a driver assigned, a cancellation
    notification is dispatched before the request is persisted.
    """
    for active_request in self.list_active_per_user(requester_id):
        active_request.status = "canceled"
        driver_assigned = hasattr(active_request, "driver_id") and active_request.driver_id
        if driver_assigned:
            Helpers.dispatch(
                "notify_driver_request_canceled",
                "Request canceled: %s" % active_request.serialize()
            )
        self.save(active_request)
def list_in_rectangle(
    self, top_left, bottom_right, only_active=True, top_left_exclude=None, bottom_right_exclude=None
):
    """Delegate a rectangular driver query to the DAO.

    Both corner points are normalized through Helpers.validate_geo_point
    before the lookup; the optional exclude corners describe an inner
    rectangle to skip and are passed through unvalidated.
    """
    validated_top_left = Helpers.validate_geo_point(top_left)
    validated_bottom_right = Helpers.validate_geo_point(bottom_right)
    return self.dao.list_in_rectangle(
        validated_top_left,
        validated_bottom_right,
        only_active,
        top_left_exclude,
        bottom_right_exclude,
    )
def create_request(self, to_obj, **args):
    """Create a new driver request for a user.

    :param to_obj: request transfer object, or a dict used to build one
    :raises UserHasActiveRequest: if the user already has an active request
    :return: the created request object
    """
    if isinstance(to_obj, dict):
        to_obj = self.to_class(**to_obj)

    # a user may only have one active request at a time; any() avoids
    # materializing the whole listing just to test emptiness
    if any(True for _ in self.list_active_per_user(to_obj.requester_id)):
        raise UserHasActiveRequest()

    result = self.create(to_obj, **args)
    try:
        # ask the external service to find and notify drivers for this request
        Helpers.dispatch(
            "find_and_notify_drivers",
            "New driver request: %s" % result.serialize()
        )
        return result
    except Exception:
        # roll back the freshly created request, then re-raise with the
        # original traceback (bare raise; `raise e` would truncate it)
        self.cancel_active_requests(to_obj.requester_id)
        raise
def _run_query(self, query, *fields, **kwargs):
    """Run an Elasticsearch search and yield one transfer object per hit.

    :param query: Elasticsearch query body
    :param fields: optional field names; when given, restricts ``_source``
    :param kwargs: may carry extra read args under ``self._READ_ARGS_LABEL``
    """
    read_args = add_defaults(kwargs.get(self._READ_ARGS_LABEL, {}), self._default_read_args)
    if fields:
        # restrict the returned _source to the requested fields
        read_args["_source"] = Helpers.concat(fields, ",")
    try:
        records = self.data_source.connection.search(
            index=self.data_source.index,
            doc_type=self._get_table_name(),
            body=query,
            params=read_args
        )
        for record in records["hits"]["hits"]:
            yield self._record_to_to(record)
    except ElasticsearchException as e:
        if e.__class__ in self._EXCEPTION_IGNORE_ON_QUERY:
            # NOTE(review): exceptions on the "ignore" list are logged but
            # still re-raised below -- confirm whether a bare return was
            # intended here (cf. get_by_pk / get_by_pks, which return None
            # for their ignore lists).
            self._log_exception(*e.args)
        raise
def get_by_pks(self, pks, *fields, **kwargs):
    """Bulk-fetch records by primary key via Elasticsearch mget.

    :param pks: iterable of document ids
    :param fields: optional field names; when given, restricts ``_source``
    :return: dict mapping each id to its transfer object (None when the id
        was not found), or None when an ignorable read exception occurs
    """
    read_args = add_defaults(kwargs.get(self._READ_ARGS_LABEL, {}), self._default_read_args)
    if fields:
        read_args["_source"] = Helpers.concat(fields, ",")
    try:
        response = self.data_source.connection.mget(
            index=self.data_source.index,
            doc_type=self._get_table_name(),
            body=dict(ids=list(pks)),
            params=read_args
        )
        by_id = {}
        for doc in response["docs"]:
            by_id[doc["_id"]] = self._record_to_to(doc) if doc["found"] else None
        return by_id
    except ElasticsearchException as e:
        if e.__class__ in self._EXCEPTION_IGNORE_ON_READ:
            self._log_exception(*e.args)
            return None
        raise
def get_by_pk(self, pk, *fields, **kwargs):
    """Fetch a single record by primary key.

    :param pk: document id
    :param fields: optional field names; when given, restricts ``_source``
    :return: the transfer object, or None when the record is missing or an
        ignorable read exception occurs
    """
    read_args = add_defaults(kwargs.get(self._READ_ARGS_LABEL, {}), self._default_read_args)
    if fields:
        read_args["_source"] = Helpers.concat(fields, ",")
    try:
        raw_record = self.data_source.connection.get(
            index=self.data_source.index,
            doc_type=self._get_table_name(),
            id=pk,
            params=read_args
        )
        if raw_record is None:
            return None
        return self._record_to_to(raw_record)
    except ElasticsearchException as e:
        if e.__class__ not in self._EXCEPTION_IGNORE_ON_READ:
            raise
        self._log_exception(*e.args)
        return None
def __init__(self):
    """Load configuration and wire up the driver bus."""
    config = Helpers.load_config()
    self.cfg = config
    self.ds_name = config["api"]["database"]
    self.environ = config["env"]
    self.driver_bus = DriverBus(self.ds_name, self.environ)
    # drivers scoring below this threshold are ignored
    # NOTE(review): run() reads the class attribute _score_cutoff, not this
    # instance attribute -- confirm which one is canonical.
    self.score_cutoff = 10
def pk(self):
    """Compose this record's primary key from its configured pk fields."""
    key_parts = [getattr(self, field_name) for field_name in self._pks]
    return Helpers.concat(key_parts)