def test_update_two_playgrounds(self):
    """Posting updates for two playgrounds records both in changes.json."""
    utils.load_test_playgrounds()

    cases = ((1, 'NEW NAME'), (2, 'ANOTHER NEW NAME'))

    for playground_id, new_name in cases:
        response = self.client.post(
            url_for('update_playground'),
            data={'id': playground_id, 'name': new_name})
        self.assertEqual(response.status_code, 302)

        expected_url = '%s/playground/%s.html' % (
            app_config.S3_BASE_URL, Playground.get(id=playground_id).slug)
        self.assertEqual(
            response.headers['Location'].split('?')[0], expected_url)

    with open('data/changes.json') as f:
        updates = json.load(f)

    self.assertEqual(len(updates), 2)

    for i, (playground_id, new_name) in enumerate(cases):
        self.assertEqual(updates[i]['action'], 'update')
        self.assertEqual(updates[i]['playground']['id'], playground_id)
        self.assertEqual(updates[i]['playground']['name'], new_name)
def process_insert(record):
    """
    Process a single insert record from changes.json.

    Creates a new Playground (and any PlaygroundFeatures) from the
    record and returns ``(playground, revisions)``, where revisions is
    a list of {'field', 'from', 'to'} dicts for the revision log.
    """
    playground = Playground()

    new_data = {}

    for key, value in record['playground'].items():
        # 'id' and 'features' are not plain Playground columns.
        if key not in ['id', 'features']:
            new_data[key] = value
            setattr(playground, key, value)

    playground.save()

    # Inserts have no prior state: every field goes from '' to its value.
    revisions = [{'field': key, 'from': '', 'to': value}
                 for key, value in new_data.items()]

    # BUG FIX: not every record carries a 'features' key; the original
    # raised KeyError here. Use .get() like the guarded variant of this
    # function elsewhere in the file.
    for feature in record['playground'].get('features', []):
        PlaygroundFeature.create(slug=feature, playground=playground)
        revisions.append({'field': feature, 'from': 0, 'to': 1})

    return (playground, revisions)
def process_insert(record):
    """
    Process a single insert record from changes.json.

    Returns ``(playground, revisions)``: the newly created Playground
    and a list of {'field', 'from', 'to'} dicts describing what was set.
    """
    playground = Playground()
    new_data = {}

    for key, value in record['playground'].items():
        # 'id' and 'features' are handled separately, not as columns.
        if key not in ['id', 'features']:
            new_data[key] = value
            setattr(playground, key, value)

    playground.save()

    # New records have no previous values, so 'from' is always ''.
    revisions = [{'field': key, 'from': '', 'to': value}
                 for key, value in new_data.items()]

    # BUG FIX: tolerate records without a 'features' key (the original
    # raised KeyError; the guarded duplicate of this function does not).
    for feature in record['playground'].get('features', []):
        PlaygroundFeature.create(slug=feature, playground=playground)
        revisions.append({'field': feature, 'from': 0, 'to': 1})

    return (playground, revisions)
def process_insert(record):
    """
    Process a single insert record from changes.json.

    Returns ``(playground, revisions)``: the new Playground and the
    list of {'field', 'from', 'to'} revision-log entries.
    """
    playground = Playground()
    new_data = {}

    for field, value in record['playground'].items():
        # 'id' and 'features' are not direct Playground attributes.
        if field in ('id', 'features'):
            continue
        new_data[field] = value
        setattr(playground, field, value)

    playground.save()

    # Inserts have no prior values: every field goes from '' to new.
    revisions = [{'field': field, 'from': '', 'to': value}
                 for field, value in new_data.items()]

    # 'features' is optional on incoming records.
    for feature in record['playground'].get('features', []):
        PlaygroundFeature.create(slug=feature, playground=playground)
        revisions.append({'field': feature, 'from': 0, 'to': 1})

    return (playground, revisions)
def delete_playground_confirm(playground_slug=None):
    """
    Confirm deleting a playground: flag it as deactivated.

    Answers GET requests (hit from a confirmation link). The original
    comment claimed "Only handle POST requests" while the code checked
    GET; the comment was wrong. Also, the original silently returned
    None on a non-GET request or a missing slug — abort explicitly
    instead, matching the other variant of this view in the file.
    """
    from flask import request

    if request.method != 'GET':
        abort(401)

    if not playground_slug:
        abort(400)

    # Deactivate rather than delete, so the row (and its history) stays.
    Playground.get(slug=playground_slug).deactivate()

    return json.dumps({
        'slug': playground_slug,
        'action': 'delete',
        'success': True
    })
def query_by_owner(self, user, status='all'):
    """
    Fetch playgrounds visible to ``user``, newest-updated first.

    Admins see every playground; other users only those they own,
    created or last updated. ``status`` is a STATUS_DICT key, or
    'all' for no status filter.
    """
    logger.info('NdbPlaygroundDao:: DBHIT: query_by_owner for %s ' % user.email)

    owner_query = Playground.query()

    if not user_has_role(user, 'admin'):
        owner_query = Playground.query(ndb.OR(
            Playground.owners == user.key,
            Playground.created_by == user.key,
            Playground.updated_by == user.key))

    if status != 'all':
        status_value = STATUS_DICT.get(status)
        # BUG FIX: the original passed ``status == status_value`` — a
        # plain Python bool comparing two local values — which is not a
        # valid ndb filter. Filter on the model property instead.
        owner_query = owner_query.filter(Playground.status == status_value)

    owner_query = owner_query.order(-Playground.updated_on)

    return owner_query.fetch()
def process_insert(record):
    """
    Process a single insert record from changes.json.

    Builds a new Playground from the record, attaches any features, and
    returns ``(playground, revisions)`` where revisions lists
    {'field', 'from', 'to'} dicts for the revision log.
    """
    playground = Playground()

    applied = {}
    for attr, value in record['playground'].items():
        # Skip pseudo-fields that aren't Playground columns.
        if attr not in ('id', 'features'):
            applied[attr] = value
            setattr(playground, attr, value)

    playground.save()

    revisions = []
    # Every applied field is logged as '' -> new value (no prior state).
    for attr, value in applied.items():
        revisions.append({'field': attr, 'from': '', 'to': value})

    # Features may be absent from the incoming record entirely.
    for feature_slug in record['playground'].get('features', []):
        PlaygroundFeature.create(slug=feature_slug, playground=playground)
        revisions.append({'field': feature_slug, 'from': 0, 'to': 1})

    return (playground, revisions)
def search_datastore(self, status, curs, **params):
    """
    Search playgrounds with an optional status filter plus arbitrary
    field filters, returning one page of results in both directions.

    Returns a 6-tuple
    ``(next_data, next_curs, next_more, prev_data, prev_curs, prev_more)``
    for forward/backward pagination ordered by ``updated_on``.
    """
    if status != 'all':
        status_value = STATUS_DICT.get(status)
        # NOTE(review): an unknown status yields status_value = None,
        # which would make this %d format raise — confirm callers only
        # pass known statuses.
        logger.debug('status %d ' % (status_value))
        search_query = Playground.query(Playground.status == status_value)
    else:
        search_query = Playground.query()

    for key, value in params.items():
        # Dotted keys address structured properties (address.*,
        # contactInfo.*); those filters are built via getattr on the
        # sub-model. TODO: explore a cleaner way to construct named
        # queries for structured properties.
        if '.' in key and value is not None:
            struct, attr = key.split('.')
            if struct == 'address':
                search_query = search_query.filter(getattr(Playground.address, attr) == value)
            elif struct == 'contactInfo':
                search_query = search_query.filter(getattr(Playground.contact_info, attr) == value)
        else:
            if key == 'sport' and value is not None:
                search_query = self.build_query_for_sport(search_query, value, True)
            elif value is not None:
                search_query = search_query.filter(getattr(Playground, key) == value)

    logger.info('NdbPlaygroundDao:: DBHIT: search query ' + str(search_query))

    # Two orderings of the same query: forward pages run newest-first;
    # the reversed ordering is used to fetch the previous page.
    search_forward = search_query.order(-Playground.updated_on)
    search_reverse = search_query.order(Playground.updated_on)

    logger.info('NdbPlaygroundDao:: DBHIT: Cursor ' + str(curs))

    if curs is not None:
        curs = Cursor(urlsafe=curs)
        next_data, next_curs, next_more = search_forward.fetch_page(PAGE_SIZE, start_cursor=curs)
        # NOTE(review): next_curs can be None on the final page, in
        # which case .reversed() here would raise — confirm upstream.
        prev_data, prev_curs, prev_more = search_reverse.fetch_page(PAGE_SIZE, start_cursor=next_curs.reversed())
    else:
        # No incoming cursor: fetch the first page, and a backward page
        # only when a forward cursor exists.
        next_data, next_curs, next_more = search_forward.fetch_page(PAGE_SIZE)
        if next_curs is not None:
            prev_data, prev_curs, prev_more = search_reverse.fetch_page(PAGE_SIZE, start_cursor=next_curs.reversed())
        else:
            prev_data = None
            prev_curs = None
            prev_more = None

    return (next_data, next_curs, next_more, prev_data, prev_curs, prev_more)
def index():
    """
    Playgrounds index page.

    Renders the index with per-metro playground counts and the overall
    total.
    """
    context = make_context()

    # Copy the metro dicts before annotating them: app_config.METRO_AREAS
    # is module-level shared state, and mutating it in place would leak
    # request-computed counts between requests.
    metros = [dict(metro) for metro in app_config.METRO_AREAS]

    for metro in metros:
        metro['playground_count'] = Playground.select().where(
            Playground.zip_code << metro['zip_codes']).count()

    context['playground_count'] = intcomma(Playground.select().count())
    context['metros'] = metros

    return render_template('index.html', **context)
def index():
    """Playgrounds index page."""
    context = make_context()

    metros = app_config.METRO_AREAS

    # Annotate each metro with how many playgrounds fall in its ZIPs.
    for metro in metros:
        in_metro = Playground.zip_code << metro['zip_codes']
        metro['playground_count'] = Playground.select().where(in_metro).count()

    context['metros'] = metros
    context['playground_count'] = intcomma(Playground.select().count())

    return render_template('index.html', **context)
def test_process_inserts(self):
    """A single insert record creates one playground and one revision."""
    models.delete_tables()
    models.create_tables()

    new_playgrounds, revision_group = data.process_changes(
        'tests/data/test_inserts.json')
    self.assertEqual(len(new_playgrounds), 1)

    created = Playground.select().where(
        Playground.id == new_playgrounds[0].id)[0]
    self.assertEqual(created.name, 'NEW NAME')

    revisions = Revision.select().where(
        Revision.revision_group == revision_group)
    self.assertEqual(revisions.count(), 1)

    revision = revisions[0]
    self.assertEqual(revision.playground.id, created.id)

    log = revision.get_log()
    self.assertEqual(len(log), 1)
    self.assertEqual(log[0]['field'], 'name')
    self.assertEqual(log[0]['from'], '')
    self.assertEqual(log[0]['to'], 'NEW NAME')

    headers = revision.get_headers()
    self.assertEqual(headers['content_length'], '18')
    self.assertEqual(headers['host'], 'localhost')

    cookies = revision.get_cookies()
def test_process_updates_simple(self):
    """A simple update changes the name plus one feature, both logged."""
    utils.load_test_playgrounds()

    updated_playgrounds, revision_group = data.process_changes(
        'tests/data/test_updates_simple.json')
    self.assertEqual(len(updated_playgrounds), 1)

    playground = Playground.select().where(
        Playground.id == updated_playgrounds[0].id)[0]
    self.assertEqual(playground.id, 1)
    self.assertEqual(playground.name, 'NEW NAME')

    revisions = Revision.select().where(
        Revision.revision_group == revision_group)
    self.assertEqual(revisions.count(), 1)

    revision = revisions[0]
    self.assertEqual(revision.playground.id, playground.id)

    log = revision.get_log()
    self.assertEqual(len(log), 2)

    expected = [
        ('name', 'Strong Reach Playground', 'NEW NAME'),
        ('smooth-surface-throughout', 1, 0),
    ]
    for entry, (field, from_value, to_value) in zip(log, expected):
        self.assertEqual(entry['field'], field)
        self.assertEqual(entry['from'], from_value)
        self.assertEqual(entry['to'], to_value)

    headers = revision.get_headers()
    self.assertEqual(headers['content_length'], '18')
    self.assertEqual(headers['host'], 'localhost')

    cookies = revision.get_cookies()
    self.assertEqual(len(cookies), 0)
def test_process_inserts(self):
    """Processing the insert fixture yields one playground + revision."""
    models.delete_tables()
    models.create_tables()

    new_playgrounds, revision_group = data.process_changes(
        'tests/data/test_inserts.json')
    self.assertEqual(len(new_playgrounds), 1)

    playground = Playground.select().where(
        Playground.id == new_playgrounds[0].id)[0]
    self.assertEqual(playground.name, 'NEW NAME')

    group_revisions = Revision.select().where(
        Revision.revision_group == revision_group)
    self.assertEqual(group_revisions.count(), 1)

    rev = group_revisions[0]
    self.assertEqual(rev.playground.id, playground.id)

    log = rev.get_log()
    self.assertEqual(len(log), 1)

    entry = log[0]
    self.assertEqual(entry['field'], 'name')
    self.assertEqual(entry['from'], '')
    self.assertEqual(entry['to'], 'NEW NAME')

    headers = rev.get_headers()
    self.assertEqual(headers['content_length'], '18')
    self.assertEqual(headers['host'], 'localhost')

    cookies = rev.get_cookies()
def test_remove_from_search_index(self):
    """
    remove_from_search_index() deletes a previously indexed document
    from CloudSearch (verified by searching for a sentinel term).
    """
    app_config.configure_targets('staging')
    utils.load_test_playgrounds()
    playground = Playground.select()[0]

    # Index a document with a unique sentinel name under a test id.
    sdf = playground.sdf()
    sdf['id'] = 'test_%i' % playground.id
    sdf['fields']['name'] = 'THIS IS NOT A PLAYGROUND NAME axerqwak'
    sdf['fields']['deployment_target'] = 'test'

    response = requests.post('http://%s/2011-02-01/documents/batch' % app_config.CLOUD_SEARCH_DOC_DOMAIN, data=json.dumps([sdf]), headers={ 'Content-Type': 'application/json' })
    self.assertEqual(response.status_code, 200)

    # Monkey patch delete_sdf to so it return test id
    # (bump the version so CloudSearch accepts the delete).
    delete_sdf = playground.delete_sdf()
    delete_sdf['id'] = 'test_%i' % playground.id
    delete_sdf['version'] = sdf['version'] + 1
    old_func = playground.delete_sdf
    playground.delete_sdf = lambda: delete_sdf
    playground.remove_from_search_index()
    playground.delete_sdf = old_func

    # The sentinel term must no longer be findable.
    response = requests.get('http://%s/2011-02-01/search' % app_config.CLOUD_SEARCH_DOMAIN, params={ 'q': 'axerqwak' }, headers={ 'Cache-Control': 'revalidate' })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.json()['hits']['found'], 0)

    app_config.configure_targets(None)
def index():
    """Playgrounds index page."""
    context = make_context()

    total = Playground.select().count()
    context['playground_count'] = intcomma(total)

    return render_template('index.html', **context)
def test_playground_exists(self):
    """The detail page for a known playground shows its display name."""
    utils.load_test_playgrounds()

    playground = Playground.get(id=1)
    response = self.client.get(
        url_for('_playground', playground_slug=playground.slug))

    assert playground.display_name in response.data
def test_playground_exists(self):
    """Requesting an existing playground's page renders its name."""
    utils.load_test_playgrounds()

    playground = Playground.get(id=1)
    page_url = url_for('_playground', playground_slug=playground.slug)
    response = self.client.get(page_url)

    assert playground.display_name in response.data
def get_featured(self, city_name=None, sport=None, no_record=8):
    """
    Return up to ``no_record`` featured playgrounds, optionally
    filtered by city and sport, newest-created first.
    """
    logger.info('NdbPlaygroundDao:: DBHIT: get_featured for %s, %s, %s '
                % (city_name, sport, no_record))

    # status == 2 is the same constant get_active() filters on; the
    # featured flag must be compared with == True for an ndb filter.
    query = Playground.query(Playground.status == 2,
                             Playground.featured == True)

    if city_name is not None:
        query = query.filter(Playground.address.city == city_name.lower())

    if sport is not None:
        query = self.build_query_for_sport(query, sport, True)

    query = query.order(-Playground.created_on)

    return list(query.fetch(no_record))
def test_delete_playground_confirm(self):
    """
    Hitting the delete-confirm endpoint deactivates the playground and
    removes its rendered page from S3.
    """
    utils.load_test_playgrounds()
    app_config.configure_targets('staging')

    # Seed a fake rendered page on S3 for the playground.
    s3 = boto.connect_s3()
    bucket = s3.get_bucket(app_config.S3_BUCKETS[0])
    k = Key(bucket)
    k.key = '%s/playground/%s.html' % (app_config.PROJECT_SLUG, Playground.get(id=1).slug)
    k.set_contents_from_string('foo')

    response = self.client.get(url_for('delete_playground_confirm', playground_slug=Playground.get(id=1).slug))
    self.assertEqual(response.status_code, 200)

    # The playground is deactivated and its S3 page removed.
    self.assertFalse(Playground.get(id=1).active)
    self.assertIsNone(bucket.get_key(k.key))

    app_config.configure_targets(None)
def process_update(record):
    """
    Process a single update record from changes.json.

    Applies field changes to the existing Playground, replaces its
    feature set wholesale, and returns ``(playground, revisions)``
    where revisions lists {'field', 'from', 'to'} dicts for every
    field or feature that actually changed.
    """
    playground_id = record['playground']['id']
    playground = Playground.get(id=playground_id)

    # Snapshot the current column values so we can diff after saving.
    # NOTE(review): reaches into peewee internals (_data); confirm this
    # matches the installed peewee version.
    old_data = copy.copy(playground.__dict__['_data'])

    new_data = {}

    for key, value in record['playground'].items():
        # 'id' and 'features' are not direct Playground columns.
        if key not in ['id', 'features']:
            new_data[key] = value
            setattr(playground, key, value)

    playground.save()

    old_features = []

    for feature in PlaygroundFeature.select().where(PlaygroundFeature.playground == playground_id):
        old_features.append(feature.slug)

    # Delete any features already attached to this playground; the
    # incoming record's feature list fully replaces them.
    PlaygroundFeature.delete().where(PlaygroundFeature.playground == playground_id).execute()

    new_features = record['playground']['features']

    for slug in new_features:
        PlaygroundFeature.create(
            slug=slug,
            playground=playground
        )

    revisions = []

    # Only log fields whose values actually changed.
    for key, value in new_data.items():
        if old_data[key] != new_data[key]:
            revisions.append({
                'field': key,
                'from': old_data[key],
                'to': new_data[key]
            })

    # Log feature changes as 1/0 transitions, in copy-sheet order.
    if set(old_features) != set(new_features):
        for feature in copytext.Copy(app_config.COPY_PATH)['feature_list']:
            slug = feature['key']

            # Removed
            if slug in old_features and slug not in new_features:
                revisions.append({'field': slug, 'from': 1, 'to': 0})

            # Added
            if slug in new_features and slug not in old_features:
                revisions.append({'field': slug, 'from': 0, 'to': 1})

    return playground, revisions
def process_update(record):
    """
    Process a single update record from changes.json.

    Returns ``(playground, revisions)``: the updated Playground plus a
    list of {'field', 'from', 'to'} dicts for every changed field or
    feature.
    """
    incoming = record['playground']
    playground = Playground.get(id=incoming['id'])

    # Snapshot current column values before mutating, so we can diff.
    old_data = copy.copy(playground.__dict__['_data'])

    new_data = {}
    for field, value in incoming.items():
        if field in ['id', 'features']:
            continue
        new_data[field] = value
        setattr(playground, field, value)

    playground.save()

    old_features = [f.slug for f in PlaygroundFeature.select().where(
        PlaygroundFeature.playground == incoming['id'])]

    # The incoming feature list replaces the existing set wholesale.
    PlaygroundFeature.delete().where(
        PlaygroundFeature.playground == incoming['id']).execute()

    new_features = incoming['features']
    for slug in new_features:
        PlaygroundFeature.create(slug=slug, playground=playground)

    # Only fields whose values actually changed are logged.
    revisions = [
        {'field': field, 'from': old_data[field], 'to': new_data[field]}
        for field in new_data if old_data[field] != new_data[field]
    ]

    # Feature additions/removals are logged as 0/1 transitions.
    if set(old_features) != set(new_features):
        for feature in copytext.Copy(app_config.COPY_PATH)['feature_list']:
            slug = feature['key']
            if slug in old_features and slug not in new_features:
                revisions.append({'field': slug, 'from': 1, 'to': 0})
            elif slug in new_features and slug not in old_features:
                revisions.append({'field': slug, 'from': 0, 'to': 1})

    return playground, revisions
def load_playgrounds(path='data/playgrounds.csv'):
    """
    Load playground data from the CSV into sqlite.

    NOTE: THIS HAS NOT BEEN TESTED IN A VERY LONG TIME.
    """
    features = copytext.Copy(app_config.COPY_PATH)['feature_list']

    with open(path) as f:
        rows = CSVKitDictReader(f)

        for row in rows:
            # Rows flagged as duplicates in the source sheet are skipped.
            if row['Duplicate'] == 'TRUE':
                continue

            # Map CSV columns (their original header names) onto model
            # fields; coordinates are optional.
            playground = Playground.create(
                nprid=row['NPRID'],
                name=row['NAME'],
                facility=row['FACILITY'],
                facility_type=row['FACILITY_TYPE'],
                address=row['ADDRESS'],
                city=row['CITY'],
                state=row['STATE'],
                zip_code=row['ZIP'],
                longitude=float(row['LONGITUDE']) if row['LONGITUDE'] else None,
                latitude=float(row['LATITUDE']) if row['LATITUDE'] else None,
                agency=row['Agency'],
                agency_type=row['AgencyType'],
                owner=row['OWNER'],
                owner_type=row['OWNER_TYPE'],
                remarks=row['REMARKS'],
                public_remarks=row['PubRermarks'],
                url=row['url'],
                entry=row['Entry'],
                source=row['Source']
            )

            # Attach every feature column marked TRUE for this row.
            for feature in features:
                slug = feature['key']

                if row[slug] == 'TRUE':
                    PlaygroundFeature.create(
                        slug=slug,
                        playground=playground
                    )

            # Record the initial import as revision group 1.
            Revision.create(
                timestamp=datetime.datetime.now(pytz.utc),
                action='insert',
                playground=playground,
                log=json.dumps([]),
                headers='',
                cookies='',
                revision_group=1
            )
def delete_playground_confirm(playground_slug=None):
    """Confirm deleting a playground by flagging it deactivated."""
    from flask import request

    # Guard: GET only, and a slug is required.
    if request.method != 'GET':
        abort(401)
    if not playground_slug:
        abort(400)

    Playground.get(slug=playground_slug).deactivate()

    result = {
        'slug': playground_slug,
        'action': 'delete',
        'success': True
    }
    return json.dumps(result)
def test_process_update_only_one(self):
    """An update record leaves unrelated playgrounds untouched."""
    utils.load_test_playgrounds()

    updated_playgrounds, revision_group = data.process_changes(
        'tests/data/test_updates_simple.json')
    self.assertEqual(len(updated_playgrounds), 1)

    untouched = Playground.select().where(
        Playground.id != updated_playgrounds[0].id)[0]
    self.assertNotEqual(untouched.id, 1)
    self.assertNotEqual(untouched.name, 'NEW NAME')
def process_delete(record):
    """
    Create a revision from a delete request.

    Looks up the playground by slug and returns ``(playground,
    revisions)``, where revisions logs the active-flag flip and the
    submitted reason.
    """
    slug = record['playground']['slug']
    playground = Playground.get(slug=slug)

    revisions = [
        {'field': 'active', 'from': True, 'to': False},
        {'field': 'reason', 'from': '', 'to': record['playground']['text']},
    ]

    return (playground, revisions)
def get_active(self, city_name=None, sport=None, no_record=8):
    """
    Return active playgrounds, optionally filtered by city and sport,
    newest-created first. ``no_record`` of -1 means "all".
    """
    logger.info('NdbPlaygroundDao:: DBHIT: get_active for %s, %s, %s '
                % (city_name, sport, no_record))

    query = Playground.query(Playground.status == 2)

    if city_name is not None:
        query = query.filter(Playground.address.city == city_name.lower())

    if sport is not None:
        query = self.build_query_for_sport(query, sport)

    query = query.order(-Playground.created_on)

    if no_record > -1:
        return list(query.fetch(no_record))

    # Return all records; App Engine's fetch() has no -1 sentinel.
    return list(query.fetch())
def test_delete_playground_confirm(self):
    """
    Confirming a delete deactivates the playground and removes its
    rendered page from the staging S3 bucket.
    """
    utils.load_test_playgrounds()
    app_config.configure_targets('staging')

    # Seed a fake rendered page on S3 for playground 1.
    s3 = boto.connect_s3()
    bucket = s3.get_bucket(app_config.S3_BUCKETS[0])
    k = Key(bucket)
    k.key = '%s/playground/%s.html' % (app_config.PROJECT_SLUG, Playground.get(id=1).slug)
    k.set_contents_from_string('foo')

    response = self.client.get(
        url_for('delete_playground_confirm', playground_slug=Playground.get(id=1).slug))
    self.assertEqual(response.status_code, 200)

    # Playground flagged inactive; S3 page gone.
    self.assertFalse(Playground.get(id=1).active)
    self.assertIsNone(bucket.get_key(k.key))

    app_config.configure_targets(None)
def test_process_update_only_one(self):
    """Only the playground named in the update record is modified."""
    utils.load_test_playgrounds()

    updated, revision_group = data.process_changes(
        'tests/data/test_updates_simple.json')
    self.assertEqual(len(updated), 1)

    other = Playground.select().where(
        Playground.id != updated[0].id)[0]
    self.assertNotEqual(other.id, 1)
    self.assertNotEqual(other.name, 'NEW NAME')
def get_recommend(self, locality=None, sport=None, no_record=8):
    """
    Return up to ``no_record`` recommended (active) playgrounds,
    optionally matching a locality/city and a sport list.
    """
    logger.info('NdbPlaygroundDao:: DBHIT: get_recommend for %s, %s, %s ' % (locality, sport, no_record))

    playground_query = Playground.query(Playground.status == 2)

    # Callers may pass the literal string 'None' (form data); treat it
    # like no filter. The locality matches either address.locality or
    # address.city.
    if locality is not None and locality != '' and locality != 'None':
        playground_query = playground_query.filter(ndb.OR(Playground.address.locality == locality.lower(), Playground.address.city == locality.lower()))

    if sport is not None and sport != '' and sport != 'None':
        playground_query = self.build_query_for_multisport(playground_query, sport)

    playground_query = playground_query.order(-Playground.created_on)

    if no_record > -1:
        return list(playground_query.fetch(no_record))
    else:
        # Return all. Simulating -1 for App Engine, whose fetch() has
        # no "unlimited" sentinel.
        return list(playground_query.fetch())
def post(self):
    """
    Handle a playground cover-image upload.

    Stores the uploaded blob key on the playground, busts its cache
    entry, then redirects to the supplied continue URL (always, even
    when no file was uploaded).
    """
    upload_files = self.get_uploads("cover_image")
    # Renamed from ``id`` so the builtin isn't shadowed.
    pg_id = self.request.get("pg_id")
    pg = Playground.get_by_id(long(pg_id))
    redirect_url = self.request.get("continue").encode('ascii', 'ignore')

    if upload_files is not None and len(upload_files) > 0:
        blob_info = upload_files[0]
        pg.cover = blob_info.key()
        pg.put()
        # Invalidate the cached playground so the new cover shows.
        mc_delete(cache_keys.get_playground_cache_key(long(pg_id)))
        logger.info('Cover image link: ' + images.get_serving_url(pg.cover))

    return self.redirect(redirect_url)
def load_playgrounds(path='data/playgrounds.csv'):
    """
    Load playground data from the CSV into sqlite.

    NOTE: THIS HAS NOT BEEN TESTED IN A VERY LONG TIME.
    """
    features = copytext.Copy(app_config.COPY_PATH)['feature_list']

    with open(path) as f:
        rows = CSVKitDictReader(f)

        for row in rows:
            # Skip rows flagged as duplicates in the source sheet.
            if row['Duplicate'] == 'TRUE':
                continue

            # Map the CSV's original header names onto model fields;
            # coordinates are optional and default to None.
            playground = Playground.create(
                nprid=row['NPRID'],
                name=row['NAME'],
                facility=row['FACILITY'],
                facility_type=row['FACILITY_TYPE'],
                address=row['ADDRESS'],
                city=row['CITY'],
                state=row['STATE'],
                zip_code=row['ZIP'],
                longitude=float(row['LONGITUDE']) if row['LONGITUDE'] else None,
                latitude=float(row['LATITUDE']) if row['LATITUDE'] else None,
                agency=row['Agency'],
                agency_type=row['AgencyType'],
                owner=row['OWNER'],
                owner_type=row['OWNER_TYPE'],
                remarks=row['REMARKS'],
                public_remarks=row['PubRermarks'],
                url=row['url'],
                entry=row['Entry'],
                source=row['Source'])

            # Attach every feature column marked TRUE for this row.
            for feature in features:
                slug = feature['key']

                if row[slug] == 'TRUE':
                    PlaygroundFeature.create(slug=slug, playground=playground)

            # Record the initial import as revision group 1.
            Revision.create(timestamp=datetime.datetime.now(pytz.utc),
                            action='insert',
                            playground=playground,
                            log=json.dumps([]),
                            headers='',
                            cookies='',
                            revision_group=1)
class Test_Playground(unittest.TestCase):
    """
    Tests for a Playground board class that takes a size and builds a
    playing field. NOTE(review): this is a different Playground from
    the datastore model used elsewhere — confirm the import.
    """

    def setUp(self):
        # A size-5 playground shared by the tests below.
        self.obj_1 = Playground(5)

    def test_initial_Playground(self):
        assert self.obj_1.size == 5

    def test_no_value(self):
        # Constructing without a size must raise.
        with pytest.raises(Exception) as e_info:
            obj = Playground()

    def test_build_playground(self):
        field = self.obj_1.build_playground()
        # NOTE(review): expects cell (1, 1) to start with 7 —
        # presumably a seeded value; confirm against build_playground.
        assert list(field[1][1])[0] == 7
def process_delete(record):
    """
    Create a revision from a delete request.

    Returns ``(playground, revisions)``: the playground being
    deactivated and the two log entries describing the deactivation
    and its reason. The playground itself is not modified here.
    """
    playground = Playground.get(slug=record['playground']['slug'])

    deactivation = {'field': 'active', 'from': True, 'to': False}
    reason = {
        'field': 'reason',
        'from': '',
        'to': record['playground']['text'],
    }

    return (playground, [deactivation, reason])
def test_load_playgrounds(self):
    """CSV loading skips duplicate rows and creates expected features."""
    with open('tests/data/test_playgrounds.csv') as f:
        rows = list(CSVKitDictReader(f))

    non_duplicate = [row for row in rows if row['Duplicate'] != 'TRUE']

    utils.load_test_playgrounds()

    playgrounds = Playground.select()
    self.assertEqual(len(non_duplicate), playgrounds.count())

    features = PlaygroundFeature.select()
    self.assertEqual(features.count(), 2)
def test_process_updates_features(self):
    """A feature update replaces existing features with the new set."""
    utils.load_test_playgrounds()

    PlaygroundFeature.create(
        slug='transfer-stations-to-play-components',
        playground=Playground.get(id=1))

    # The JSON fixture adds one feature and removes the one just created.
    updated_playgrounds, revision_group = data.process_changes(
        'tests/data/test_updates_features.json')

    features = PlaygroundFeature.select().where(
        PlaygroundFeature.playground == 1)
    self.assertEqual(features.count(), 1)

    feature = features[0]
    self.assertEqual(feature.slug, 'smooth-surface-throughout')
def post(self):
    """
    Backfill missing lat/longs and re-persist (re-index for search)
    every entity of the selected type: 'event', 'playground' or
    'trainingcentre'.
    """
    # Renamed from ``type``/``data`` to stop shadowing builtins.
    entity_type = self.request.get('type')
    logger.info('Type: %s' % entity_type)

    if entity_type == 'None':
        # Nothing selected in the form — bounce back with an error.
        logger.error('Select anyone of Type')
        message = ('Select anyone of Type')
        self.add_message(message, 'error')
        return self.redirect_to('search-update')

    entities = []
    if entity_type == 'event':
        entities = Event.query().fetch()
    elif entity_type == 'playground':
        entities = Playground.query().fetch()
    elif entity_type == 'trainingcentre':
        entities = TrainingCentre.query().fetch()

    updated_count = 0
    for entity in entities:
        if entity.address.latlong is None:
            # Geocode entities that were saved without coordinates.
            latitude, longitude = get_latlong_from_address(entity.address)
            if latitude is not None and longitude is not None:
                entity.address.latlong = ndb.GeoPt(latitude, longitude)
                logger.info('New Lat Long: ' + str(entity.address.latlong))
        logger.info('Populated Data: ' + str(entity))

        # Re-persisting through the DAO refreshes the search index entry.
        if entity_type == 'event':
            key = self.eventDao.persist(entity, self.user_info)
        elif entity_type == 'playground':
            key = self.playgroundDao.persist(entity, self.user_info)
        elif entity_type == 'trainingcentre':
            key = self.trainingCentreDao.persist(entity, self.user_info)
        if key is not None:
            updated_count += 1
            logger.info(str(key.id()) + ' succesfully search updated')

    logger.info('%s %s Entities Updated Search Successfully' % (updated_count, entity_type))
    message = ('%s %s Entities Updated Search Successfully' % (updated_count, entity_type))
    self.add_message(message, 'success')
    return self.redirect_to('search-update')
def test_process_updates_features(self):
    """Processing a feature update swaps the attached feature set."""
    utils.load_test_playgrounds()

    PlaygroundFeature.create(
        slug='transfer-stations-to-play-components',
        playground=Playground.get(id=1))

    # The fixture adds one feature and removes the one just created.
    updated_playgrounds, revision_group = data.process_changes(
        'tests/data/test_updates_features.json')

    remaining = PlaygroundFeature.select().where(
        PlaygroundFeature.playground == 1)
    self.assertEqual(remaining.count(), 1)
    self.assertEqual(remaining[0].slug, 'smooth-surface-throughout')
def _playground(playground_slug):
    """
    Playground detail page.

    Renders the playground plus its edit forms and the visible
    revision history.
    """
    from flask import request

    context = make_context()
    context['playground'] = Playground.get(slug=playground_slug)
    context['fields'] = context['playground'].update_form()
    context['features'] = context['playground'].update_features_form()
    # Only insert/update revisions are shown, newest first.
    context['revisions'] = Revision.select()\
        .where(Revision.playground == context['playground'].id)\
        .where((Revision.action == 'insert') | (Revision.action == 'update'))\
        .order_by(Revision.timestamp.desc())
    context['display_field_name'] = display_field_name
    context['path'] = request.path

    return render_template('playground.html', **context)
def update_playground():
    """
    Update a single playground.

    POST-only: records an 'update' change payload (persisted via
    write_data) and redirects to the playground's rendered page on S3
    with a thank-you flag.
    """
    from flask import request

    if request.method != 'POST':
        abort(401)

    # NOTE(review): request.form.get('id') may be None/non-numeric if
    # the form is malformed — confirm upstream validation.
    playground = Playground.get(id=request.form.get('id'))

    payload = create_change_payload('update', request)
    payload['playground']['id'] = int(request.form.get('id'))

    write_data(payload)

    return redirect('%s/playground/%s.html?action=editing_thanks' % (app_config.S3_BASE_URL, playground.slug))
def form_to_dao(self, playground_id):
    """
    Map the bound CMS form onto a Playground entity.

    Loads the existing record when ``playground_id`` looks usable
    (non-None and longer than one character), otherwise builds a
    fresh Playground. Returns the populated entity (not persisted).
    """
    playground = None
    if playground_id is not None and len(playground_id) > 1:
        playground = self.playgroundDao.get_record(long(playground_id))
    else:
        playground = Playground()
    playground.name = self.form.name.data
    playground.sport = self.form.sport.data.lower()
    # Create an automatic alias (slug) for the playground from its name.
    playground.alias = utils.slugify(self.form.name.data)
    playground.description = self.form.description.data
    playground.featured = self.form.featured.data
    # Copy locality/city from the basic-info section into the address
    # sub-form before mapping the address.
    self.form.address.locality.data = self.form.locality.data
    self.form.address.city.data = self.form.city.data
    playground = cms_utils.form_to_dao_address(self.form, playground)
    playground = cms_utils.form_to_dao_contact_info(self.form, playground)
    return playground
def process_delete(record):
    """
    Turn a delete request into revision-log entries.

    Returns ``(playground, revisions)`` for the playground named by
    the record's slug; the entries log the active-flag flip and the
    submitted reason.
    """
    playground = Playground.get(slug=record['playground']['slug'])
    reason_text = record['playground']['text']

    revisions = [
        {'field': 'active', 'from': True, 'to': False},
        {'field': 'reason', 'from': '', 'to': reason_text},
    ]

    return (playground, revisions)
def process_delete(record):
    """
    Build the revision log for a delete request.

    Returns ``(playground, revisions)``; the playground itself is not
    modified here.
    """
    details = record['playground']
    playground = Playground.get(slug=details['slug'])

    revisions = []
    revisions.append({'field': 'active', 'from': True, 'to': False})
    revisions.append({'field': 'reason', 'from': '', 'to': details['text']})

    return (playground, revisions)
def test_remove_from_search_index(self):
    """
    remove_from_search_index() deletes an indexed document from
    CloudSearch, verified by searching for a sentinel term afterwards.
    """
    app_config.configure_targets('staging')
    utils.load_test_playgrounds()
    playground = Playground.select()[0]

    # Index a document with a unique sentinel name under a test id.
    sdf = playground.sdf()
    sdf['id'] = 'test_%i' % playground.id
    sdf['fields']['name'] = 'THIS IS NOT A PLAYGROUND NAME axerqwak'
    sdf['fields']['deployment_target'] = 'test'

    response = requests.post('http://%s/2011-02-01/documents/batch' % app_config.CLOUD_SEARCH_DOC_DOMAIN, data=json.dumps([sdf]), headers={'Content-Type': 'application/json'})
    self.assertEqual(response.status_code, 200)

    # Monkey patch delete_sdf to so it return test id
    # (with a bumped version so CloudSearch accepts the delete).
    delete_sdf = playground.delete_sdf()
    delete_sdf['id'] = 'test_%i' % playground.id
    delete_sdf['version'] = sdf['version'] + 1
    old_func = playground.delete_sdf
    playground.delete_sdf = lambda: delete_sdf
    playground.remove_from_search_index()
    playground.delete_sdf = old_func

    # The sentinel term must no longer be findable.
    response = requests.get('http://%s/2011-02-01/search' % app_config.CLOUD_SEARCH_DOMAIN, params={'q': 'axerqwak'}, headers={'Cache-Control': 'revalidate'})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.json()['hits']['found'], 0)

    app_config.configure_targets(None)
def test_prepare_email(self):
    """
    _prepare_email renders the revision's playground name into the
    message body.
    """
    utils.load_test_playgrounds()
    playground = Playground.get(id=1)

    # A minimal one-field revision log, stored as JSON text.
    log = '''[{ "field": "name", "from": "%s", "to": "Test Playground" }]''' % playground.name

    Revision(action='update', timestamp=time.mktime( datetime.datetime.now(pytz.utc).timetuple()), playground=playground, log=log, headers='', cookies='', revision_group=1).save()

    body = app._prepare_email(1)

    self.assertTrue(body.find(playground.name) >= 0)
def test_prepare_email(self):
    """
    The revision-group email body mentions the playground's name.
    """
    utils.load_test_playgrounds()
    playground = Playground.get(id=1)

    # One-field revision log as JSON text, referencing the real name.
    log = '''[{ "field": "name", "from": "%s", "to": "Test Playground" }]''' % playground.name

    Revision(
        action='update',
        timestamp=time.mktime(datetime.datetime.now(pytz.utc).timetuple()),
        playground=playground,
        log=log,
        headers='',
        cookies='',
        revision_group=1
    ).save()

    body = app._prepare_email(1)

    self.assertTrue(body.find(playground.name) >= 0)
import app_config from models import Playground SEARCH_DISTANCE = 0.001 matched = set() def print_playground(playground): print '%s: http://%s/%s/playground/%s' % ( playground.name or 'unnamed', app_config.PRODUCTION_S3_BUCKETS[0], app_config.PROJECT_SLUG, playground.slug ) for playground in Playground.select(): if playground.id in matched: continue lat = playground.latitude lng = playground.longitude if not lat or not lng: continue nearby = Playground.select().where( Playground.latitude.between( lat - SEARCH_DISTANCE, lat + SEARCH_DISTANCE ), Playground.longitude.between( lng - SEARCH_DISTANCE, lng + SEARCH_DISTANCE
def playground_create():
    """
    Render the page for creating a new playground.
    """
    template_vars = make_context()
    template_vars['features'] = Playground.features_form()

    return render_template('create.html', **template_vars)
def _prepare_email(revision_group):
    """
    Render the notification e-mail body for one revision group.

    Gathers all revisions in the group, buckets them into inserts,
    delete-requests and updates, and renders templates/_email.html
    with that context. Returns the rendered HTML string.
    """
    revisions = Revision.select().where(Revision.revision_group == int(revision_group))

    context = {}
    context['base_url'] = '%s/playground/' % app_config.S3_BASE_URL
    context['total_revisions'] = revisions.count()
    context['deletes'] = {}
    context['deletes']['playgrounds'] = []
    context['deletes']['total_revisions'] = 0
    context['inserts'] = {}
    context['inserts']['playgrounds'] = []
    context['inserts']['total_revisions'] = 0
    context['updates'] = {}
    context['updates']['playgrounds'] = []
    context['updates']['total_revisions'] = 0

    # Newly created playgrounds.
    inserts = revisions.where(Revision.action == 'insert')

    if inserts.count() > 0:
        context['inserts']['total_revisions'] = inserts.count()

        for revision in inserts:
            p = Playground.get(slug=revision.playground.slug)

            # NOTE(review): peewee's internal field dict is used (and
            # mutated) directly here -- presumably safe because each
            # Playground.get() returns a fresh instance.
            playground_dict = p.__dict__['_data']
            playground_dict['display_name'] = p.display_name
            playground_dict['site_url'] = '%s/playground/%s.html' % (app_config.S3_BASE_URL, revision.playground.slug)
            playground_dict['revision_group'] = int(revision_group)
            playground_dict['headers'] = revision.get_headers()
            playground_dict['feature_count'] = int(p.feature_count)

            # Only surface nearby playgrounds within half a mile.
            nearby = p.nearby(3)
            playground_dict['nearby'] = []

            for n in nearby:
                if n.distance < 0.5:
                    playground_dict['nearby'].append(n)

            context['inserts']['playgrounds'].append(playground_dict)

        context['inserts']['playgrounds'] = sorted(context['inserts']['playgrounds'], key=lambda p: p['name'])

    # Requested deletions.
    deletes = revisions.where(Revision.action == 'delete-request')

    if deletes.count() > 0:
        context['deletes']['total_revisions'] = deletes.count()

        for revision in deletes:
            p = Playground.get(slug=revision.playground.slug)

            # Fixed a duplicated-assignment typo here
            # (playground_dict = playground_dict = ...).
            playground_dict = p.__dict__['_data']
            playground_dict['display_name'] = p.display_name
            playground_dict['site_url'] = '%s/playground/%s.html' % (app_config.S3_BASE_URL, revision.playground.slug)
            playground_dict['delete_url'] = '%s/delete-playground/%s/' % (app_config.SERVER_BASE_URL, revision.playground.slug)
            playground_dict['revision_group'] = int(revision_group)

            # Pull the requester's stated reason out of the revision log.
            for item in json.loads(revision.log):
                if item.get('field', None) == "reason":
                    playground_dict['text'] = cgi.escape(item.get('to'))

            playground_dict['headers'] = revision.get_headers()

            context['deletes']['playgrounds'].append(playground_dict)

        context['deletes']['playgrounds'] = sorted(context['deletes']['playgrounds'], key=lambda p: p['name'])

    # Edits to existing playgrounds, grouped by playground.
    updates = revisions.where(Revision.action == 'update')

    if updates.count() > 0:
        context['updates']['total_revisions'] = updates.count()

        # Builtin set() replaces the deprecated sets.Set; deduplicates
        # slugs when one playground has several revisions in the group.
        updated_playgrounds = set()

        for revision in updates:
            updated_playgrounds.add(revision.playground.slug)

        for playground_slug in updated_playgrounds:
            p = Playground.get(slug=playground_slug)
            playground_dict = p.__dict__['_data']
            playground_dict['display_name'] = p.display_name
            playground_dict['site_url'] = '%s/playground/%s.html' % (app_config.S3_BASE_URL, playground_slug)
            playground_dict['revisions'] = []

            for revision in updates:
                if revision.playground.id == p.id:
                    revision_dict = {}
                    revision_dict['revision_group'] = revision_group
                    revision_dict['fields'] = revision.get_log()
                    revision_dict['headers'] = revision.get_headers()
                    playground_dict['revisions'].append(revision_dict)

            context['updates']['playgrounds'].append(playground_dict)

        context['updates']['playgrounds'] = sorted(context['updates']['playgrounds'], key=lambda p: p['name'])

    with open('templates/_email.html', 'rb') as read_template:
        payload = Template(read_template.read())

    return payload.render(**context)
def process_update(record):
    """
    Process a single update record from changes.json.

    Applies the record's field values and feature list to the matching
    Playground, then returns a (playground, revisions) tuple, where
    revisions is a list of {'field', 'from', 'to'} dicts describing
    every change actually made.
    """
    playground_id = record['playground']['id']

    # Capture the old data from this playground before mutating it,
    # so revisions can record the previous values.
    old_data = Playground.get(id=playground_id).__dict__['_data']

    # Collect the incoming field values; 'id' and 'features' aren't
    # model columns to set directly.
    record_dict = {}

    for key, value in record['playground'].items():
        if key not in ['id', 'features']:
            record_dict[key] = value

    # Push any updates onto the model and save.
    playground = Playground.get(id=playground_id)

    if record_dict:
        for field, value in record_dict.items():
            setattr(playground, field, value)

        playground.save()

    # Remember the old feature slugs, then clear every feature attached
    # to this playground; the incoming list is re-created from scratch.
    old_features = []

    for feature in PlaygroundFeature.select().where(PlaygroundFeature.playground == playground_id):
        old_features.append(feature.slug)

    PlaygroundFeature.delete().where(PlaygroundFeature.playground == playground_id).execute()

    # The Web may omit the feature list entirely; treat that as "none".
    # (Replaces two separate try/excepts -- one of which was a bare
    # except: -- with a single .get().)
    new_features = record['playground'].get('features', [])

    for slug in new_features:
        PlaygroundFeature.create(
            slug=slug,
            playground=playground
        )

    # Build the revision log: one entry per field whose value changed.
    revisions = []
    new_data = record_dict

    for key, value in new_data.items():
        if old_data[key] != new_data[key]:
            revision_dict = {}
            revision_dict['field'] = key
            revision_dict['from'] = old_data[key]
            revision_dict['to'] = new_data[key]
            revisions.append(revision_dict)

    # Record feature changes. NOTE(review): the outer comparison is
    # order-sensitive, but it's only a cheap short-circuit -- the
    # per-slug membership checks below decide what gets logged.
    if old_features != new_features:
        # Walk the canonical feature list, emitting a 1 -> 0 revision
        # for each removed feature and 0 -> 1 for each added one.
        for feature in copytext.COPY.feature_list:
            slug = feature['key']

            if slug in old_features:
                if slug not in new_features:
                    revisions.append({"field": slug, "from": 1, "to": 0})

            if slug in new_features:
                if slug not in old_features:
                    revisions.append({"field": slug, "from": 0, "to": 1})

    return playground, revisions