Пример #1
0
def remove_memcache_key_by_geo_point(geo_point,namespace):
	'''
	Deletes the geohash->deal_key memcache entries that cover a geopoint,
	so stale cached results are dropped after a geohash cell changes.
	Typical use case: a new deal was uploaded and the memcache key (i.e. the
	new deal's geohash) needs to be updated or deleted.
	@param geo_point: the geopoint of the deal
	@type geo_point: db.GeoPt
	@param namespace: the memcache's namespace
	@type namespace: MEMCACHE_ACTIVE_GEOHASH_NAMESPACE or MEMCACHE_TEST_GEOHASH_NAMESPACE
	'''
	# Deals are cached under both the precision-5 and precision-6 cells.
	geo_hash_5 = geohash.encode(geo_point.lat, geo_point.lon, precision=5)
	geo_hash_6 = geohash.encode(geo_point.lat, geo_point.lon, precision=6)
	geo_hash_list = [geo_hash_5, geo_hash_6]
	logging.info(geo_hash_list)
	#create the client
	client = memcache.Client() #@UndefinedVariable
	# Retry until the delete succeeds; the failsafe cap keeps a persistent
	# memcache failure from spinning forever (original used the redundant
	# "while True and failsafe < 50").
	failsafe = 0
	while failsafe < 50:
		failsafe += 1
		logging.debug('failsafe: '+str(failsafe))
		if client.delete_multi(geo_hash_list, namespace=namespace):
			logging.debug('geohashes {} were deleted from memcache'.format(geo_hash_list))
			break
	else:
		# All attempts failed - surface it instead of exiting silently.
		logging.warning('memcache delete failed after '+str(failsafe)+' attempts')
	return
Пример #2
0
def get_by_geo_box(bot_left,bot_right):
	'''
	Fetch every Business whose geo_hash falls between the hashes of the two
	corners of a bounding box (a geohash range scan).
	@param bot_left: one corner of the bounding box
	@type bot_left: db.GeoPt
	@param bot_right: the opposite corner of the bounding box
	@type bot_right: db.GeoPt
	@return: list of levr.Business entities inside the box
	'''
	hash_min = geohash.encode(bot_left.lat,bot_left.lon,precision=10)
	# BUG FIX: the original referenced an undefined name "top_right",
	# which raised a NameError - the second parameter is the other corner.
	hash_max = geohash.encode(bot_right.lat,bot_right.lon,precision=10)
	
	businesses = levr.Business.all().filter('geo_hash >=',hash_min).filter('geo_hash <=',hash_max).fetch(None)
	logging.debug(len(businesses))
	return businesses
Пример #3
0
	def get(self):
		'''
		Debug handler: counts all Business entities, then fetches the
		businesses whose geo_hash lies in the 9-cell geohash neighborhood
		around a hard-coded Boston-area point and prints their hashes.
		'''
		#take in geo_point
		#set radius, expand, get all deals
		request_point = levr.geo_converter('42.35,-71.110')
		center_hash = geohash.encode(request_point.lat,request_point.lon,precision=6)
		all_squares = geohash.expand(center_hash)
		
		# total entity count, for comparison with the neighborhood query
		# (renamed from "all", which shadowed the builtin)
		total = levr.Business.all().count()
		self.response.out.write(total)
		self.response.out.write('<br/>')
		
		keys = []
		for query_hash in all_squares:
			# "{" sorts just after "z", so the >=/<= pair is a prefix scan:
			# it matches every geo_hash starting with query_hash
			q = levr.Business.all(keys_only=True).filter('geo_hash >=',query_hash).filter('geo_hash <=',query_hash+"{")
			keys.extend(q.fetch(None))
		
		self.response.out.write(str(len(keys))+"<br/>")
		
		#batch-get all matched entities and dump their hashes
		deals = levr.Business.get(keys)
		logging.debug(deals)
		for deal in deals:
			self.response.out.write(deal.geo_hash+"<br/>")
Пример #4
0
	def get(self):
		'''
		Debug handler: build (or fetch) the GHash entity for a fixed
		coordinate, populate it, and render it.
		'''
		lat, lon = 42.3617863, -71.1359041
		ghash = geohash.encode(lat, lon, classes.GHash._precision)
		entity = classes.GHash.get_or_insert(ghash)
		entity.populate()
		self.view_geohash(ghash)
Пример #5
0
    def get(self):
        # Batch job: for every coordinate in the module-level geo_points
        # list, ensure a GHash entity exists, populate it, and render it.
        # NOTE(review): t0 and successes are assigned but never reported in
        # the visible code - presumably timing/summary output follows below
        # this excerpt; confirm before removing.
        t0 = datetime.now()
        self.set_plaintext()

        successes = []
        for geo_point in geo_points:  # geo_points: module-level list of [lat, lon] pairs
            try:
                ghash = geohash.encode(geo_point[0], geo_point[1],
                                       classes.GHash._precision)

                geo_hash_entity = classes.GHash.get_or_insert(ghash)
                # 				geo_hash_entity = classes.GHash.get_or_insert('drt2zp')
                # tile = osm.Osm(geo_hash_entity)
                # 				tile.get_nature()
                # 				tile.get_roads()
                # 				tile.get_buildings()
                # 				tile.get_leisure()
                geo_hash_entity.populate()
                self.view_geohash(geo_hash_entity.name)

            except Exception, e:
                # one bad point must not abort the whole batch
                utils.log_error(e)
            else:
                logging.info('fetch success for ' + str(geo_point))
                successes.append(geo_point)
Пример #6
0
    def full_search(self):
        """
        Common search entry point shared by the api and the website.

        @warning: should be wrapped in a try,except block
        @return: a list of obituaries
        @rtype: list
        """
        # free-text fields; empty strings normalize to None
        name = self.rget("name", str) or None
        pob = self.rget("pob", str) or None
        pod = self.rget("pod", str) or None

        # join the string params together if they exist
        tokens = utils.tokenize_multi(name, pob, pod)
        logging.info(tokens)

        dob = self.rget("dob", self.parse_date) or None
        dod = self.rget("dod", self.parse_date) or None

        lat = self.rget("lat", float)
        lon = self.rget("lon", float)

        logging.info("Sending to search: ")
        logging.info(tokens)

        if not (lat and lon):
            # no usable location: plain text/date search
            return utils.search(tokens, dob, dod)

        # location given: search the 9-cell geohash neighborhood
        ghash_list = geohash.expand(geohash.encode(lat, lon))
        precision = self.rget("precision", int) or 4
        return utils.search(tokens, dob, dod, ghash_list, precision)
Пример #7
0
def get_deals_in_area(tags,request_point,precision=5):
	'''
	Fetch all active deals matching a set of tags in the geohash
	neighborhood around a point.
	@param tags: list of tag strings; 'all' is a wildcard (no tag filter)
	@param request_point: center of the search
	@type request_point: db.GeoPt
	@param precision: geohash precision of the search cells
	@type precision: int
	@return: list of levr.Deal entities
	'''
	# BUG FIX: the original immediately overwrote request_point with a
	# hard-coded Boston test coordinate, ignoring the caller's argument.
	logging.debug(precision)
	center_hash = geohash.encode(request_point.lat,request_point.lon,precision=precision)
	logging.debug(center_hash)
	hash_set = geohash.expand(center_hash)
	logging.debug(hash_set)
	
	##DEBUG
	ref_query = levr.Deal.all().filter('deal_status =','active')
	for tag in tags:
		if tag != 'all':
			ref_query.filter('tags =',tag)
	ref_deals = ref_query.fetch(None)
	logging.info("total number of deals: "+str(len(ref_deals)))
	##/DEBUG
	
	
	####build search query
	#only grabbing deal keys, then batch get array
	deal_keys = []
	for query_hash in hash_set:
		#only grab keys for deals that have active status
		q = levr.Deal.all(keys_only=True).filter('deal_status =','active')
		#grab all deals where primary_cat is in tags
		for tag in tags:
			#all is a special keyword
			if tag != 'all':
				logging.debug('tag: '+str(tag))
				q.filter('tags =',tag)
		#filter by geohash: "{" sorts after "z", so this is a prefix scan
		q.filter('geo_hash >=',query_hash).filter('geo_hash <=',query_hash+"{") #max bound
		
		#get all keys for this neighborhood
		fetched_deals = q.fetch(None)
		logging.info('From: '+query_hash+", fetched: "+str(len(fetched_deals)))
		
		deal_keys.extend(fetched_deals)
	
	#batch get results. here is where we would set the number of results we want and the offset
	deals = levr.Deal.get(deal_keys)
	
	logging.info('number of deals fetched: '+str(len(deals)))
	
	return deals
Пример #8
0
	def post(self):
		'''
		A user is geotagging a tombstone.
		
		Reads location, dates, names, message and uploaded image keys from
		the request, creates an obituary entity, and builds the response
		payload. Responds 400 if no identifying field was provided.
		
		NOTE(review): on success, `response` is built but never sent in the
		visible code - presumably a send_response call follows below this
		excerpt; confirm.
		'''
		# get the blobstore key of the image if an image is uploaded
		try:
			logging.info(self.request)
			logging.info(self.request.headers)
			logging.info(self.request.params)
			# tombstone location (third rget arg presumably marks the
			# field as required - TODO confirm against rget's definition)
			lat = self.rget('lat',float,True)
			lon = self.rget('lon',float,True)
			ghash = geohash.encode(lat,lon)
			
			# dates
			dob = self.rget('dob',self.parse_date)
			dod = self.rget('dod',self.parse_date)
			# birth/death locations
			pob = self.rget('pob',str)
			pod = self.rget('pod',str)
			
			# other
			name = self.rget('name',str)
			tombstone_message = self.rget('tombstone_message',str) or None
			uploader_id = self.rget('uploader_id',int,True)
			
			# image: collect blobstore keys for every uploaded file
			logging.info(self.get_uploads())
			logging.info(self.get_uploads('image'))
			img_keys = [p.key() for p in self.get_uploads()]
			
			# make sure at least one field is entered
			assert dob or dod or pob or pod \
				or name or tombstone_message or img_keys, \
				'Must provide at least one identifying piece of information'
			
			ob = self.create_obituary(
									uploader_key = ndb.Key(models.AppUser,uploader_id),
									name = name,
									ghash = ghash,
									dob = dob,
									dod = dod,
									pob = pob,
									pod = pod,
									tombstone_message = tombstone_message,
									img_keys = img_keys
									)
			
			response = {
					'oid' : ob.key.id(),
					'img_url' : ob.get_photo_urls()
					}
		except AssertionError,e:
			# validation failure -> 400 with the assertion message
			return self.send_response(400,e.message,{'invalid_input':''})
Пример #9
0
	def get_value_for_datastore(self, model_instance):
		'''
		Compute the value persisted for this property: a precision-8
		geohash string derived from the instance's geo_point.
		(Original docstring claimed "a list of geo_hash prefixes";
		a single hash string is what is actually returned.)
		@param model_instance: the entity being saved; must have a
		  geo_point attribute with .lat and .lon
		@type model_instance: db.Model subclass instance
		@return: 8-character geohash string
		'''
		logging.debug('geo hash property')
		geo_point = getattr(model_instance, 'geo_point')
		precision = 8
		geo_hash = geohash.encode(geo_point.lat, geo_point.lon, precision)
		
		return geo_hash
Пример #10
0
	def get(self):
		'''
		Backfill job: compute and store a geo_hash for every Deal entity,
		derived from its geo_point.
		NOTE: despite the variable names, this queries levr.Deal, not
		levr.Business - preserved as-is.
		'''
		keys = levr.Deal.all(keys_only=True).fetch(None)
		entities = levr.Deal.get(keys)
		for entity in entities:
			point = entity.geo_point
			logging.debug(point)
			encoded = geohash.encode(point.lat, point.lon)
			logging.debug(encoded)
			entity.geo_hash = encoded
		db.put(entities)
Пример #11
0
    def get(self):
        # Batch job: for each coordinate in the module-level geo_points
        # list, ensure a GHash entity exists, then fetch Google Places
        # buildings for it.
        # NOTE(review): t0 and successes are assigned but never reported in
        # the visible code - confirm they are used below this excerpt.
        self.set_plaintext()
        t0 = datetime.now()
        # 		geo_point = 42.3697785,-71.0391343 # maverick square
        successes = []
        for geo_point in geo_points:  # geo_points: module-level list of [lat, lon] pairs
            try:
                ghash = geohash.encode(geo_point[0], geo_point[1], classes.GHash._precision)
                geo_hash_entity = classes.GHash.get_or_insert(ghash)

                tile = gplaces.Gplaces(geo_hash_entity)
                tile.get_buildings()
            except Exception, e:
                # one bad point must not abort the whole batch
                logging.error(str(e))
            else:
                logging.info("fetch success for " + str(geo_point))
                successes.append(geo_point)
Пример #12
0
    def get(self):
        # Batch job (near-duplicate of the handler above): for each
        # coordinate in the module-level geo_points list, ensure a GHash
        # entity exists, then fetch Google Places buildings for it.
        # NOTE(review): t0 and successes are assigned but never reported in
        # the visible code - confirm they are used below this excerpt.
        self.set_plaintext()
        t0 = datetime.now()
        #		geo_point = 42.3697785,-71.0391343 # maverick square
        successes = []
        for geo_point in geo_points:  # geo_points: module-level list of [lat, lon] pairs
            try:
                ghash = geohash.encode(geo_point[0], geo_point[1],
                                       classes.GHash._precision)
                geo_hash_entity = classes.GHash.get_or_insert(ghash)

                tile = gplaces.Gplaces(geo_hash_entity)
                tile.get_buildings()
            except Exception, e:
                # one bad point must not abort the whole batch
                logging.error(str(e))
            else:
                logging.info('fetch success for ' + str(geo_point))
                successes.append(geo_point)
Пример #13
0
 def POST(self):
     '''
     Create a restaurant for the currently logged-in user.
     Fails when the user is not logged in, lacks permission, or already
     owns a shop (restnum != 0 or an existing row with their username).
     @return: JSON payload with result flag, message, and the new row on success
     '''
     user_data = web.input()
     lat = float(user_data.lat)
     lng = float(user_data.lng)
     rest_name = user_data.rest_name
     adress = user_data.adress
     user = lunch.get_current_user()
     dic = {'result':False,'type':0}
     if user and user.restnum==0:
         uid = user.id
         # SECURITY FIX: the username was interpolated directly into the
         # SQL string (broken % format / SQL-injectable); use web.py's
         # parameterized $var query syntax instead.
         rests = model.db.query("select id from restuarant where username=$username", vars={'username': user.username})
         if len(rests)>0:
             return lunch.write_json({'result':False,'message':'you already have a shop'})
         # hash_location: precision-12 geohash used for proximity lookups
         rest_id = model.db.insert('restuarant',hash_location=geohash.encode(lat, lng, 12),name=rest_name,username=user.username,lat=lat,lng=lng,adress=adress,maxdistance=500)
         rest = model.db.query('select * from restuarant where id=$rest_id',vars=locals())[0]
         # drop fractional seconds before serializing the timestamp
         rest.created_time = str(rest.created_time).split('.')[0]
         model.db.update('user',where='id=$uid',restnum=1,vars=locals())
         user.restnum = 1
         dic = {'result':True,'message':'success','type':1,'rest':rest}
         return lunch.write_json(dic)
     return lunch.write_json({'result':False,'message':'you have not login or you permission is not enough'})
Пример #14
0
 def GET(self):
     '''
     List restaurants near (lat, lng). The optional "percision" request
     parameter (clamped to 1..11) selects how coarse the geohash cell is.
     @return: JSON payload with the matched restaurant rows
     '''
     data = web.input()
     lat = float(data.lat)
     lng = float(data.lng)
     # clamp the requested precision into the valid geohash range 1..11
     cell_precision = 6
     if 'percision' in data:
         cell_precision = int(data.percision)
     cell_precision = max(1, min(cell_precision, 11))
     # encode at full precision, then truncate to the requested cell size
     cell = geohash.encode(lat, lng, 12)[0:cell_precision]
     # sorted neighbor cells bound a single range scan over hash_location
     neighbors = sorted(self.get_neighbors(cell, config.LOCATION_PRECISION))
     rs = model.db.query("SELECT * FROM restuarant WHERE hash_location >= '%s' AND hash_location <= '%s'" % (neighbors[0],neighbors[-1]))
     results = []
     for restuarant in rs:
         # drop fractional seconds before serializing the timestamp
         restuarant.created_time = str(restuarant.created_time).split('.')[0]
         results.append(restuarant)
     return lunch.write_json({'result':True,'restuarants':results})
Пример #15
0
    def get(self):
        # Batch job: for every coordinate in the module-level geo_points
        # list, ensure a GHash entity exists, populate it, and render it.
        # NOTE(review): t0 and successes are assigned but never reported in
        # the visible code - confirm they are used below this excerpt.
        t0 = datetime.now()
        self.set_plaintext()

        successes = []
        for geo_point in geo_points:  # geo_points: module-level list of [lat, lon] pairs
            try:
                ghash = geohash.encode(geo_point[0], geo_point[1], classes.GHash._precision)

                geo_hash_entity = classes.GHash.get_or_insert(ghash)
                # 				geo_hash_entity = classes.GHash.get_or_insert('drt2zp')
                # tile = osm.Osm(geo_hash_entity)
                # 				tile.get_nature()
                # 				tile.get_roads()
                # 				tile.get_buildings()
                # 				tile.get_leisure()
                geo_hash_entity.populate()
                self.view_geohash(geo_hash_entity.name)

            except Exception, e:
                # one bad point must not abort the whole batch
                utils.log_error(e)
            else:
                logging.info("fetch success for " + str(geo_point))
                successes.append(geo_point)
Пример #16
0
def dealCreate(params,origin,upload_flag=True):
	'''
	Create or update a deal entity from request params.
	
	@param params: dict of request parameters; the required keys depend on
	  origin - see the reference block below
	@param origin: one of 'merchant_create', 'merchant_edit',
	  'phone_existing_business', 'phone_new_business', 'admin_review'
	@param upload_flag: True when an image blob was uploaded with the request
	@return: share_url, or (share_url, deal) for the phone origins
	'''
	logging.debug('DEAL CREATE')
	
	logging.debug("origin: "+str(origin))
	logging.debug(log_dict(params))
	
	
	logging.debug("image was uploaded: "+str(upload_flag))
	#init tags list for deal
	tags = []
	
	#business information - never create business unless old phone
		#just want to get tags to store on deal
	#get deal information
	#create deals with appropriate owners
	
	'''

	
	#####merchant_edit
		params = {
				'uid'			#uid is businessOwner
				'business'		#businessID
				'deal'			#dealID
				'deal_description'
				'deal_line1'
				'deal_line2'
				}
		!!! check for uploaded image !!!
		

	#####merchant_create
		params = {
				'uid'			#uid is businessOwner
				'business'
				'deal_line1'
				'deal_line2' 	#optional
				'deal_description'
				'img_key'
				}
		
	#####phone_existing_business
		params = {
				'uid' 			#uid is ninja
				'business' 
				'deal_description'
				'deal_line1'
				!!! no deal_line2 !!!
				}
	#####phone_new_business
		params = {
				'uid'			#uid is ninja
				'business_name'
				'geo_point'
				'vicinity'
				'types'
				'deal_description'
				'deal_line1'
				}
	#####admin_review
		params = {
				'uid'		#uid is ninja
				'deal'		#deal id
				'business'	#business id
				'deal_line1'
				'deal_line2'
				'deal_description'
				'tags'
				'end date'
				!!! other stuff !!!
				}
	'''
	
	
	#==== deal information ====#
	
	
	#==== business stuff ====#
	if origin == 'phone_new_business':
		#The business to which a deal is being uploaded is not targeted
		logging.debug('origin is phone, new business being added')
		
		
		#business name
		if 'business_name' in params:
			business_name = params['business_name']
			logging.debug("business name: "+str(business_name))
		else:
			raise KeyError('business_name not in params')
		#geo point
		
		if 'geo_point' in params:
			geo_point = params['geo_point']
			geo_point = levr.geo_converter(geo_point)
			logging.debug("geo point: "+str(geo_point))
			#create geohash from geopoint
			geo_hash = geohash.encode(geo_point.lat,geo_point.lon)
			logging.info(geo_hash)
		else:
			raise KeyError('geo_point not in params')
		
		#vicinity
		if 'vicinity' in params:
			vicinity = params['vicinity']
			logging.debug("vicinity: "+str(vicinity))
		else:
			raise KeyError('vicinity not in params')
		
		#types
		if 'types' in params:
			types = params['types']
			logging.debug('start types')
			logging.debug(types)
			logging.debug(type(types))
			types = levr.tagger(types)
			logging.debug(types)
			logging.debug('end types')
		else:
			raise KeyError('types not in params')
		#check if business exists - get businessID
		business = levr.Business.all().filter('business_name =',business_name).filter('vicinity =',vicinity).get()
		logging.debug('start business info')
		logging.debug(log_model_props(business))
		logging.debug('end business info')
		
		if not business:
			logging.debug('business doesnt exist')
			#if a business doesn't exist in db, then create a new one
			business = levr.Business()
			
			#add data to the new business
			business.business_name 	= business_name
			business.vicinity 		= vicinity
			business.geo_point		= geo_point
			business.types			= types
			business.geo_hash		= geo_hash
			
			logging.debug(log_model_props(business))
			#put business
			business.put()
			
			
		else:
			logging.debug('business exists')
			#business exists- grab its tags
			logging.debug(geo_hash)
		
		#grab the businesses tags
		tags.extend(business.create_tags())
		#get businessID - not encrypted - from database
		businessID = business.key()
		logging.debug("businessID: "+str(businessID))
		
		#Create tags
		
		logging.debug('-------------------------------------------')
		logging.debug(tags)
	else:
		#BusinessID was passed, grab the business
		logging.debug('not oldphoone')
		
		if 'business' in params:
			businessID = params['business']
			businessID	= enc.decrypt_key(businessID)
			businessID	= db.Key(businessID)
			business	= levr.Business.get(businessID)
		else:
			raise KeyError('business not passed in params')
		#get the tags from the business
		tags.extend(business.create_tags())
		
		#grab all the other information that needs to go into the deals
		business_name 	= business.business_name
		geo_point		= business.geo_point
		vicinity		= business.vicinity
		geo_hash		= business.geo_hash
		
		logging.debug(log_model_props(business))
		

	logging.debug('!!!!!')
	#====Deal Information Lines ====#
	#deal line 1
	if 'deal_line1' in params:
		deal_text	= params['deal_line1']
		logging.debug(deal_text)
		tags.extend(levr.tagger(deal_text))
		logging.info(tags)
	else:
		raise KeyError('deal_line1 not passed in params')
	
	#deal line 2
	if origin != 'phone_existing_business' and origin != 'phone_new_business':
		if 'deal_line2' in params:
			secondary_name = params['deal_line2']
		else:
			secondary_name = False
		logging.debug(secondary_name)
		if secondary_name:
			#deal is bundled
			logging.debug('deal is bundled')
			tags.extend(levr.tagger(secondary_name))
			logging.info(tags)
			deal_type = 'bundle'
		else:
			#deal is not bundled
			# BUG FIX: this line was a bare no-op string expression in the
			# original; it was clearly intended to be logged.
			logging.debug('deal is NOT bundled')
			deal_type = 'single'
	else:
		#phone uploaded deals do not pass deal_line2
		deal_type = 'single'
	
	#description
	if 'deal_description' in params:
		description = params['deal_description']
		#truncate description to a length of 500 chars
		logging.debug(len(description))
		description = description[:500]
		logging.debug(description)
		tags.extend(levr.tagger(description))
		logging.info(tags)
	else:
		raise KeyError('deal_description not passed in params')
	
	
	#==== create the deal entity ====#
	if origin	== 'merchant_create':
		#web deals get active status and are the child of the owner
		ownerID = params['uid']
		ownerID = enc.decrypt_key(ownerID)
		
		deal = levr.Deal(parent = db.Key(ownerID))
		deal.is_exclusive		= True

	elif origin	=='merchant_edit':
		dealID	= params['deal']
		dealID	= enc.decrypt_key(dealID)
		deal	= levr.Deal.get(dealID)

	elif origin	=='phone_existing_business' or origin == 'phone_new_business':
		#phone deals are the child of a ninja
		logging.debug('STOP!')
		uid = enc.decrypt_key(params['uid'])

		deal = levr.CustomerDeal(parent = db.Key(uid))
		deal.is_exclusive		= False
		
		#phone deals expire after one week
		deal.date_end			= datetime.now() + timedelta(days=7)

	elif origin == 'admin_review':
		#deal has already been uploaded by ninja - rewriting info that has been reviewed
		dealID = enc.decrypt_key(params['deal'])
		deal = levr.CustomerDeal.get(db.Key(dealID))
		deal.been_reviewed		= True
		deal.date_start			= datetime.now()
		days_active				= int(params['days_active'])
		deal.date_end			= datetime.now() + timedelta(days=days_active)
		
		new_tags = params['extra_tags']
		tags.extend(levr.tagger(new_tags))
		logging.debug('!!!!!!!!!!!!')
		logging.debug(tags)
	
	
	#==== Link deal to blobstore image ====#
	if upload_flag == True:
		#an image has been uploaded, and the blob needs to be tied to the deal
		logging.debug('image uploaded')
		if origin == 'merchant_edit' or origin == 'admin_review':
			#an image was uploaded, so remove the old one.
			blob = deal.img
			blob.delete()
		#if an image has been uploaded, add it to the deal. otherwise do nothing.
		#assumes that if an image already exists, that it the old one has been deleted elsewhere
		blob_key = params['img_key']
		deal.img= blob_key
	else:
		#an image was not uploaded. do nothing
		logging.debug('image not uploaded')
	
	
	#add the data
	deal.deal_text 			= deal_text
	deal.deal_type			= deal_type
	deal.description 		= description
	deal.tags				= list(set(tags)) #list->set->list removes duplicates
	deal.business_name		= business_name
	deal.businessID			= str(businessID)
	deal.vicinity			= vicinity
	deal.geo_point			= geo_point
	logging.debug(geo_hash)
	deal.geo_hash			= geo_hash
	
	#secondary_name
	if deal_type == 'bundle':
		deal.secondary_name = secondary_name
	
	
	#put the deal
	deal.put()
	
	#dealput is the deal key i.e. dealID
	logging.debug(log_model_props(deal))
	logging.debug(log_model_props(business))
	
	share_url = create_share_url(deal)
	
	if origin == 'phone_existing_business' or origin =='phone_new_business':
		#needs share url and dealID
		return share_url,deal
	else:
		#return share url
		return share_url
Пример #17
0
def dealCreate(params,origin,upload_flag=True,**kwargs):
	'''
	Create a deal entity from request params (newer variant that also
	handles the 'phone_merchant' origin and fires a harmonization task
	for newly-created businesses).
	
	@param params: dict of request parameters; required keys depend on origin
	@param origin: one of 'phone_new_business', 'phone_merchant',
	  'phone_existing_business', or another origin carrying a 'business' key
	@param upload_flag: True when an image blob was uploaded with the request
	@return: the created deal entity (both branches return the same thing)
	
	@keyword expires: sets the expiration on a deal. If a merchant uploads,
	  expiration can be passed as 'never' and the deal will never expire
	
	NOTE(review): **kwargs (including the documented "expires" keyword) is
	never read in the visible body - confirm before relying on it.
	'''
	logging.debug('DEAL CREATE')
	
	logging.debug("origin: "+str(origin))
	logging.debug(log_dict(params))
	
	
	logging.debug("image was uploaded: "+str(upload_flag))
	#init tags list for deal
	tags = []
	
	#business information - never create business unless old phone
		#just want to get tags to store on deal
	#get deal information
	#create deals with appropriate owners
	
	#==== deal information ====#
	
	
	#===========================================================================
	# #==== business stuff ====#
	#===========================================================================
	if origin == 'phone_new_business':
		#The business to which a deal is being uploaded is not targeted
		logging.debug('origin is phone, new business being added')
		
		
		#business name
		if 'business_name' in params:
			business_name = params['business_name']
			logging.debug("business name: "+str(business_name))
		else:
			raise Exception('business_name not in params')
		#geo point
		
		if 'geo_point' in params:
			geo_point = params['geo_point']
			logging.debug("geo point: "+str(geo_point))
			#create geohash from geopoint
			geo_hash = geohash.encode(geo_point.lat,geo_point.lon)
		else:
			raise Exception('geo_point not in params')
		
		#vicinity
		if 'vicinity' in params:
			vicinity = params['vicinity']
			logging.debug("vicinity: "+str(vicinity))
		else:
			raise Exception('vicinity not in params')
		
		
		#types
		if 'types' in params:
			types = params['types']
			types = tagger(types)
		else:
			raise KeyError('types not in params')
		#check if business exists - get businessID
		business = Business.all().filter('business_name =',business_name).filter('vicinity =',vicinity).get()
		logging.debug('start business info')
		logging.debug(log_model_props(business))
		logging.debug('end business info')
		
		if not business:
			logging.debug('business doesnt exist')
			#if a business doesn't exist in db, then create a new one
			business = Business()
			logging.debug(log_model_props(business))
			
			
			
			#add data to the new business
			business.business_name 	= business_name
			business.vicinity 		= vicinity
			business.geo_point		= geo_point
			business.types			= types
			business.geo_hash		= geo_hash
			
			#put business
			business.put()
			
			#fire off a task to check the foursquare similarity
			task_params = {
				'geo_str'		:	str(business.geo_point),
				'query'			:	business.business_name,
				'key'			:	str(business.key())
			}
			
			#if no foursquare business exists in the database, this should try to find a foursquare business and transfer information to it
			#what if there is already a foursquare business in the database?
			
			taskqueue.add(url='/tasks/businessHarmonizationTask',payload=json.dumps(task_params))
			
			
		else:
			logging.debug('business exists')
			#business exists- grab its tags
		
		
		#grab the businesses tags
		tags.extend(business.create_tags())
		#get businessID - not encrypted - from database
		businessID = business.key()
		logging.debug("businessID: "+str(businessID))
		
		#Create tags
		
		logging.debug('-------------------------------------------')
		logging.debug(tags)
	elif origin == 'phone_merchant':
		# merchant-owned upload: the business entity is passed directly
		logging.info('phone_merchant, so do not create new business')
		business = params['business']
		tags.extend(business.create_tags())
		
		#grab all the other information that needs to go into the deals
		businessID		= str(business.key())
		business_name 	= business.business_name
		geo_point		= business.geo_point
		vicinity		= business.vicinity
		geo_hash		= business.geo_hash
	else:
		#BusinessID was passed, grab the business
		logging.debug('not oldphoone')
		
		if 'business' in params:
			businessID = params['business']
			businessID	= enc.decrypt_key(businessID)
			businessID	= db.Key(businessID)
			business	= Business.get(businessID)
		else:
			raise KeyError('business not passed in params')
		#get the tags from the business
		tags.extend(business.create_tags())
		
		#grab all the other information that needs to go into the deals
		business_name 	= business.business_name
		geo_point		= business.geo_point
		vicinity		= business.vicinity
		geo_hash		= business.geo_hash
		

	logging.debug('!!!!!')
	#===========================================================================
	# #====Deal Information Lines ====#
	#===========================================================================
	#deal line 1
	if 'deal_line1' in params:
		deal_text	= params['deal_line1'].decode()
		tags.extend(tagger(deal_text))
	else:
		raise KeyError('deal_line1 not passed in params')
	
	#deal line 2
	if origin != 'phone_existing_business' and origin != 'phone_new_business':
		if 'deal_line2' in params:
			secondary_name = params['deal_line2']
		else:
			secondary_name = False
		logging.debug(secondary_name)
		if secondary_name:
			#deal is bundled
			logging.debug('deal is bundled')
			tags.extend(tagger(secondary_name))
			logging.info(tags)
			deal_type = 'bundle'
		else:
			#deal is not bundled
#			'deal is NOT bundled'
			deal_type = 'single'
	else:
		#phone uploaded deals do not pass deal_line2
		deal_type = 'single'
	
	#description
	if 'deal_description' in params:
		description = params['deal_description'].decode()
		#truncate description to a length of 500 chars
		description = description[:500]
		tags.extend(tagger(description))
	else:
		raise KeyError('deal_description not passed in params')
	
	
	
	
	#==== create the deal entity ====#
	if origin == 'phone_merchant':
		logging.info('Origin from phone_merchant')
		user = params['user']
		
		# testers' deals are segregated from real inventory
		if user.tester:
			deal_status = 'test'
		else:
			deal_status = 'active'
		
		
		
		deal = Deal(
				parent = user,
				is_exclusive = True,
				deal_status = deal_status,
				origin = 'merchant',
				pin_color = 'green',
				)
		
	elif origin	=='phone_existing_business' or origin == 'phone_new_business':
		#phone deals are the child of a ninja
		logging.debug('STOP!')
		uid = params['uid']
		
		# If it is one of the founders uploading a deal, then it should be uploaded by a rando ninja
		admin_users = ['Carl D.','Patch W.','Alonso H.','Ethan S.','Patrick W.','Pat W.']
		owner = Customer.get(uid)
		if owner.display_name in admin_users:
			undead_ninjas = Customer.all(keys_only=True).filter('email',UNDEAD_NINJA_EMAIL).fetch(None)
			uid = random.choice(undead_ninjas)
		
		deal = Deal(
						parent			= uid,
						is_exclusive	= False
						)
		
		# carry over a pre-generated share id if the client supplied one
		if 'shareURL' in params:
			shareURL = params['shareURL']
			if shareURL:
				#shareURL was passed and is not empty
				logging.debug("shareURL: "+str(shareURL))
				share_id = shareURL.split('/')[-1] #grab share id
				logging.debug("share_id: "+str(share_id))
				deal.share_id = share_id
		
		
		
		development = params.get('development',False)
		if development:
			deal.deal_status = 'test'
		
		# ninja-uploaded deals expire after one week
		deal.date_end = datetime.now() + timedelta(days=7)
	
	#==== Link deal to blobstore image ====#
	if upload_flag == True:
		#an image has been uploaded, and the blob needs to be tied to the deal
		logging.debug('image uploaded')
		#if an image has been uploaded, add it to the deal. otherwise do nothing.
		#assumes that if an image already exists, that it the old one has been deleted elsewhere
		blob_key = params['img_key']
		deal.img= blob_key
	else:
		#an image was not uploaded. do nothing
		logging.debug('image not uploaded')
	
	
	
	
	
	
	#add the data
	# NOTE(review): bare except - if assigning deal.business fails for any
	# reason it is logged and the deal is saved without a business
	# reference; confirm this best-effort behavior is intended.
	try:
		deal.business = business
	except:
		log_error()
	deal.deal_text 			= deal_text
	deal.deal_type			= deal_type
	deal.description 		= description
	deal.tags				= list(set(tags)) #list->set->list removes duplicates
	deal.business_name		= business_name
	deal.businessID			= str(businessID)
	deal.vicinity			= vicinity
	deal.geo_point			= geo_point
	deal.geo_hash			= geo_hash
	
	#secondary_name
	if deal_type == 'bundle':
		deal.secondary_name = secondary_name
	
	
	
	#put the deal
	deal.put()
	
	# drop stale cached geohash->deal entries for this deal's location
	remove_memcache_key_by_deal(deal)
	
	
	#dealput is the deal key i.e. dealID
	logging.debug(log_model_props(deal))
	logging.debug(log_model_props(business))
	
#	share_url = create_share_url(deal)

	
	if origin == 'phone_existing_business' or origin =='phone_new_business':
		#needs share url and dealID
		return deal
	else:
		#return share url
		return deal