def test_expects_separated_reports(self, webmock):
    """Each audit_type should produce a distinctly named violations CSV."""
    # Assume: the audit output directory does not exist yet.
    report_dir = pathjoin(AUDITS_DIR, "sub-domain-com")
    self.assertPathDoesNotExist(report_dir)

    # Arrange
    webmock.get(requests_mock.ANY, text='ok')
    site = Site('sub.domain.com')

    # (audit_type, designator expected inside the report filename)
    for audit_type, designator in (("design", "design"),
                                   ("code", "code"),
                                   (None, "all")):
        site.audit_type = audit_type

        # Act
        csv_path = AxeSiteAudit(site).write_violations_to_csv()

        # Assert
        expected_name = "sub-domain-com-site-{}-violations.csv".format(designator)
        self.assertEqual(pathjoin(report_dir, expected_name), csv_path)
def test_site_validation(self):
    """Validate Site.is_valid() against defaults, good values, and bad slugs."""
    site = Site()
    self.assertFalse(
        site.is_valid(),
        'Default site values should not validate due to empty name and slug')
    site.name = "Valid Site Name"
    site.slug = 'valid-site-name'
    self.assertTrue(site.is_valid(), 'These hand-coded values should be valid')

    # Test some known-bad slugs
    bad_slugs = [
        'valid site name',       # no whitespace
        'valid--site--name',     # no repeated hyphens
        'Valid-Site-Name',       # no capital letters
        'chris\'-awesome-site',  # no apostrophes
    ]
    for bad_slug in bad_slugs:
        site.name = 'Valid Site Name'
        site.slug = bad_slug
        # BUG FIX: the message was "'...%(slug)'" % site.slug — an
        # incomplete %(key) conversion applied to a plain string, which
        # raises at message-build time (arguments are evaluated eagerly),
        # crashing every loop iteration instead of asserting.
        self.assertFalse(
            site.is_valid(),
            "This slug should have been invalid: '%s'" % bad_slug)
def post(self):
    """Create a new Site from the JSON request body.

    Returns a message payload; 500 with a generic message on any failure.
    """
    try:
        site_name = request.json["site_name"]
        if Site.find_by_sitename(site_name):
            # BUG FIX: the message previously interpolated
            # request.json["username"] — a field this payload does not
            # even carry — instead of the site name that was checked.
            return {
                'message': 'Site {} already exists'.format(site_name)
            }
        new_site = Site(site_name=site_name,
                        city=request.json['city'],
                        site_address=request.json['site_address'],
                        longitude=request.json['longitude'],
                        latitude=request.json['latitude'],
                        number_of_string=request.json['number_of_string'],
                        total_capacity=request.json['total_capacity'])
        db.session.add(new_site)
        db.session.commit()
        return {
            'message': 'Site {} was created'.format(new_site.site_name)
        }
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; still best-effort for the API contract.
        return {'message': 'Something went wrong'}, 500
def test_expects_violations_in_csv(self, webmock):
    """write_to_violation_csv emits a header row plus one row per violation."""
    # Arrange
    test_dir = pathjoin(AUDITS_DIR, "sub-domain-com")
    violations_csv_path = pathjoin(test_dir, "sub-domain-com.csv")
    webmock.get(requests_mock.ANY, text='ok')
    domain = 'sub.domain.com'
    site = Site(domain)
    page = Page(site)
    audit = AxeSiteAudit(site)
    # Build a single fully-populated violation fixture.
    source = 'test'
    identifier = 'test-error'
    severity = 'low'
    violation = Violation(
        page=page,
        source=source,
        identifier=identifier,
        severity=severity
    )
    violation.kind = "error"
    violation.help = "Error must be fixed"
    violation.help_url = "https://help.com"
    violation.html = "<p>Test</p>"
    violation.failure = "This is incorrect"

    # Act
    audit.write_to_violation_csv(violations_csv_path, [violation])
    with open(violations_csv_path, 'r') as file:
        csv_rows = list(csv.reader(file))
        row_count = len(csv_rows)

    # Assert: header row + the one violation row.
    self.assertEqual(row_count, 2)
    self.assertEqual(csv_rows[0][0], "page_url")
    # NOTE(review): presumably column 8 is the failure column — verify
    # against the writer's column ordering if it changes.
    self.assertEqual(csv_rows[1][8], violation.failure)
def home(language='es'):
    """Render the landing page with every collection item."""
    title = splitByLanguage('Inicio | Home', language)
    site = Site(language, title)
    site.blowUp = True
    all_items = Collection(site).asItems()
    return render_template('home.html', site=site, items=all_items)
def test_auction_results(self):
    """Test auction bids result calculation"""
    brandon = Bidder('BRANDON', -0.001)
    asema = Bidder('ASEMA', -0.0432)
    bids = [Bid(brandon, 'banner', 50), Bid(asema, 'banner', 51)]
    auction = Auction(Site('test.com', [brandon, asema], 40),
                      [AdUnit.banner], bids)
    winning = auction.get_max_bids()
    self.assertEqual(winning['banner'].bid, 50, 'Invalid Auction')
def respond(self):
    """Provision a new site record and render the signup page for it."""
    generated_id = str(uuid.uuid4())[:8]
    generated_key = uuid.uuid4()
    db.session.add(Site(generated_id, generated_key))
    db.session.commit()
    return render_template(
        'template_signup.html',
        context={'site_id': generated_id, 'site_key': generated_key})
def test_expects_new_site_from_url(self):
    """A full URL is decomposed into url, scheme and base_url."""
    # Arrange / Act
    source_url = 'http://sub.domain.com/path?q=foo'
    site = Site(source_url)
    # Assert
    self.assertEqual(source_url, site.url)
    self.assertEqual('http', site.scheme)
    self.assertEqual('http://sub.domain.com', site.base_url)
def test_auction_results_performance(self):
    """Evaluate response time for auction results (must finish < 1s)."""
    bids = [
        Bid(Bidder('BRANDON', -0.001), 'banner', 50),
        Bid(Bidder('ASEMA', -0.0432), 'banner', 51)
    ]
    site = Site('test.com', [bids[0].bidder, bids[1].bidder], 40)
    auction = Auction(site, [AdUnit.banner], bids)
    # FIX: time.perf_counter() is monotonic and high-resolution; time.time()
    # is wall-clock and can jump with system clock adjustments, making the
    # measured span unreliable for a performance assertion.
    time_start = time.perf_counter()
    auction.get_max_bids()
    time_span = time.perf_counter() - time_start
    self.assertLess(time_span, 1, 'Results Method Performance Inadequate')
def test_expects_new_axe_site_audit(self, webmock):
    """A freshly built AxeSiteAudit wraps its site and starts with no violations."""
    # Arrange
    webmock.get(requests_mock.ANY, text='ok')
    site = Site('sub.domain.com')
    # Act
    site_audit = AxeSiteAudit(site)
    # Assert
    self.assertIsInstance(site_audit, AxeSiteAudit)
    self.assertEqual(site, site_audit.site)
    self.assertListEqual([], site_audit.violations)
def test_expects_new_axe_page_audit(self):
    """A new AxePageAudit adopts the page URL and has no violations yet."""
    # Arrange
    page = Page(Site('https://sub.domain.com'))
    # Act
    page_audit = AxePageAudit(page)
    # Assert
    self.assertIsInstance(page_audit, AxePageAudit)
    self.assertEqual('https://sub.domain.com', page_audit.url)
    self.assertListEqual([], page_audit.violations)
def test_create_new(self):
    """Round-trip a fully-populated Site: save, count, re-fetch, delete."""
    # Keep track of the initial set, so that we can discern which changes were caused by this test
    starting_sites = Site.all()
    site = Site()
    site.name = 'Chris\' Awesome Test Site #1'
    site.slug = 'chris-awesome-test-site-1'
    site.xstreet = '1234 Fake Street'
    site.xcity = 'Universal City'
    site.xzip = '78005'
    # Coordinates are stored as strings for now; the Decimal variants
    # below were tried and parked.
    # site.latitude = Decimal('175.1')
    # site.longitude = Decimal('176.0')
    site.latitude = '175.1'
    site.longitude = '176.0'
    site.opentime = '10am'
    site.closetime = '6pm'
    site.days = 'M-T'
    site.sitecoordinator = None  # TODO: test variations of this too
    site.sitetype = 'Tax Preparation'
    site.is_open = True
    result = site.save()
    # save() is expected to report an HTTP-style status code.
    self.assertEqual(200, result, 'Failed to save the new site')
    # Validate that the record was indeed written out to the DB
    self.assertEqual(
        len(starting_sites) + 1, len(Site.all()),
        'No Site record was actually written')
    #
    # Now fetch the site data back out and confirm every field survived.
    site2 = Site.find('chris-awesome-test-site-1')
    self.assertEqual('Chris\' Awesome Test Site #1', site2.name)
    self.assertEqual('chris-awesome-test-site-1', site2.slug)
    self.assertEqual("1234 Fake Street", site2.xstreet)
    self.assertEqual('Universal City', site2.xcity)
    self.assertEqual('78005', site2.xzip)
    # self.assertEqual(Decimal('175.1'), site2.latitude)
    # self.assertEqual(Decimal('176.0'), site2.longitude)
    self.assertEqual('175.1', site2.latitude)
    self.assertEqual('176.0', site2.longitude)
    self.assertEqual('10am', site2.opentime)
    self.assertEqual('6pm', site2.closetime)
    self.assertEqual('M-T', site2.days)
    self.assertEqual(None, site2.sitecoordinator)
    self.assertEqual(True, site2.is_open)
    # Delete the newly created site and verify the count returns to baseline.
    self.assertEqual(200, site2.delete())
    self.assertEqual(
        len(starting_sites), len(Site.all()),
        'The site creation test failed to clean up after itself')
def item(language, type, country, year, item):
    """Render the detail page for a single collection item."""
    site = Site(language, item)
    site.blowUp = True
    site.lightSlider = True
    # Build the hierarchical 'Link' key before the name `item` is rebound
    # to the Item instance below.
    link_key = type + '/' + country + '/' + year + '/' + item
    item = Item(language, Collection(site).find(link_key, 'Link'))
    site.title = item.name()
    site.permalink = item.link()
    return render_template('item.html', site=site, item=item)
def test_expects_new_site_from_domain(self, webmock):
    """A bare domain is expanded with https defaults and split into parts."""
    # Arrange
    webmock.get(requests_mock.ANY, text='ok')
    # Act
    site = Site('sub.domain.com')
    # Assert
    self.assertIsInstance(site, Site)
    for expected, actual in [
        ('https', site.scheme),
        ('sub', site.subdomain),
        ('domain', site.domain),
        ('com', site.tld),
        ('sub.domain.com', site.fqdn),
        ('https://sub.domain.com', site.url),
    ]:
        self.assertEqual(expected, actual)
def test_as_item_returns_expected_results(self):
    """Collection.asItems converts raw google rows into Item objects."""
    collection = Collection(Site('es', ''))
    collection.googleData = googleData()
    # Rows become Items in order...
    assert collection.asItems()[1].name() == 'Nombre del objeto'
    # ...rows whose fields are all empty are skipped...
    assert collection.asItems()[0].name() == 'Nombre del objeto 2'
    # ...and no data yields an empty list.
    collection.googleData = []
    assert collection.asItems() == []
def post(self, user, fields, **kwargs):
    """Create a new Site owned by *user*, seeded with the "handle" field."""
    handle = fields["handle"]
    # Guard: the handle must be a non-empty string.
    if not (isinstance(handle, str) and handle):
        return {
            "message": "handle must be a non-empty string"
        }, 400
    site = Site(**fields)
    site.user_id = user.id
    site.set_first_handle(handle)
    return site.to_dict(), 201
def main():
    """Fetch one page of search results and merge its entities into the DB.

    NOTE(review): `current_page += 1` and the delay sleep suggest this body
    was meant to run inside a loop over pages (the KeyboardInterrupt handler
    also implies long-running operation) — confirm against the original
    layout; as written the function processes a single page and returns.
    """
    current_page = args.page_number
    try:
        print("Processing page {}...".format(current_page))
        result = make_request(args, current_page)
        if 'error' in result:
            exit()
        res = result['response']['search']['offers']['entities']
        # res = list(filter(lambda r: r['building'].get('buildingId'), res))
        for e in convert(res):
            # Upsert the author of every offer.
            session.merge(Author(*e['author']))
            if e['site']:
                session.merge(Site(*e['site']))
            if e['building']:
                # Existing building: no new-building id to attach.
                session.merge(Building(*e['building']))
                nbid = None
            else:
                # New building: commit immediately so the autoincrement id
                # is populated before it is attached to the offer.
                nb = session.merge(NewBuilding(*e['new_building']))
                session.commit(
                )  # todo it's working now only for new buildings autoinc IDs
                nbid = nb.id
            o = (nbid, ) + e['offer']
            session.merge(Offer(*o))
            # session.merge(Photo(*e['photo']))
        # for ent in res:
        #     session.merge(Offer(
        #         ent['offerId'],
        #         ent['active'],
        #         ent['area']['value'],
        #         ent['building'].get('houseId')
        #     ))
        session.commit()
        current_page += 1
        # Throttle between requests to stay polite to the remote API.
        print("Waiting {0} seconds".format(args.delay))
        time.sleep(args.delay)
    except Exception as e:
        # Best-effort recovery: log and back off for a minute.
        print(e)
        print("Unknown exception, waiting 60 seconds.")
        time.sleep(60)
    except KeyboardInterrupt:
        # KeyboardInterrupt derives from BaseException, so the Exception
        # handler above does not swallow it.
        print("Finishing...")
        exit()
    print("Done")
def test_delete_site(self):
    """Exercise the DELETE /site/{sitename} API end to end."""
    # Make note of the starting site count
    starting_sites = Site.all()
    # Create a Site manually first so that we isolate this test to only
    # the Delete API (comment fixed: it previously said "Update API").
    raw_site = Site()
    raw_site.name = 'tc_lambda_sites_test_delete_site'
    raw_site.address = '123 tc_lambda_sites_test_delete_site street'
    raw_site.availability_status = 'Green'
    raw_site.hours = '12-12'
    raw_site.is_open = True
    raw_site.save()
    # Sanity-check the fixture before exercising the API.
    self.assertEqual("123 tc_lambda_sites_test_delete_site street",
                     raw_site.address)
    self.assertEqual('tc_lambda_sites_test_delete_site', raw_site.name)
    self.assertEqual('Green', raw_site.availability_status)
    self.assertEqual(True, raw_site.is_open)
    self.assertEqual('12-12', raw_site.hours)

    # Simulated API Gateway event for the DELETE route.
    event = {
        'httpMethod': 'DELETE',
        'path': '/site/tc_lambda_sites_test_delete_site',
        'resource': '/site/{sitename}',
        'pathParameters': {
            'sitename': 'tc_lambda_sites_test_delete_site'
        },
        'body': None,
    }
    response = LambdaApiHandler.site_apis(event)
    self.assertEqual('', response['body'])
    self.assertEqual('200', response['statusCode'])
    self.assertEqual(
        len(starting_sites), len(Site.all()),
        'The Delete Site API failed to destroy the record it created.')
    # Fetch the site separately and validate it is gone.
    site = Site.find('tc_lambda_sites_test_delete_site')
    self.assertIsNone(site)

    # Cleanup. FIX: compare to None with `is not None`, not `!=`.
    if raw_site is not None:
        raw_site.delete()
    # Cleanup validation
    self.assertEqual(len(starting_sites), len(Site.all()),
                     "Failed to clean up the site we created!")
def test_expects_violations_csv(self, webmock):
    """With no audit_type set, the report lands in the 'all' violations CSV."""
    # Assume: no audit output directory yet.
    expected_dir = pathjoin(AUDITS_DIR, "sub-domain-com")
    self.assertPathDoesNotExist(expected_dir)
    # Arrange
    webmock.get(requests_mock.ANY, text='ok')
    audit = AxeSiteAudit(Site('sub.domain.com'))
    # Act
    csv_path = audit.write_violations_to_csv()
    # Assert
    self.assertEqual(
        pathjoin(expected_dir, "sub-domain-com-site-all-violations.csv"),
        csv_path)
def test_find_item_returns_expected_results(self):
    """Collection.find matches by either language's name, or another field."""
    data = googleData()
    collection = Collection(Site('es', ''))
    collection.googleData = data
    # Spanish and English names resolve to the same row.
    assert collection.find('Nombre del objeto 2') == data[2]
    assert collection.find('Item name 2') == data[2]
    # An unknown name yields an empty dict.
    assert collection.find('Wrong') == {}
    # Lookups can target a different field...
    assert collection.find('nombre-del-objeto-2', 'Link') == data[2]
    # ...and misses there yield an empty dict too.
    assert collection.find('Wrong', 'Link de la Ceca | Mint\'s Link') == {}
def respond(self):
    """Create a site record, then render the landing page with its endpoints."""
    new_id = str(uuid.uuid4())[:8]
    new_key = uuid.uuid4()
    db.session.add(Site(new_id, new_key))
    db.session.commit()
    # Environment reads kept in the original order (KeyError order matters).
    domain_name = os.environ["DOMAIN_NAME"]
    collector_path = os.environ["COLLECTOR_PATH"]
    protocol = os.environ["PROTOCOL"]
    dashboard_path = os.environ["DASHBOARD_PATH"]
    base = protocol + "://" + domain_name
    context = {
        'site_id': new_id,
        'site_key': new_key,
        'endpoint': base + collector_path,
        'dashboard': base + dashboard_path
    }
    return render_template('template_landing.html', context=context)
def test_create_new(self):
    """Round-trip a Site through save, find and delete."""
    site = Site()
    site.name = 'test_site_2'
    site.address = '234 Fake Street'
    site.availability_status = 'Green'
    site.hours = '1-2'
    site.is_open = True
    result = site.save()
    self.assertTrue(result, 'Failed to save the new site')
    # Validate that the record was indeed written out to the DB
    self.assertEqual(2, len(Site.all()))
    # Now fetch the site data back out
    site2 = Site.find('test_site_2')
    # BUG FIX: these assertions previously checked the local `site`
    # object, so the record fetched from the DB was never validated.
    self.assertEqual("234 Fake Street", site2.address)
    self.assertEqual('test_site_2', site2.name)
    self.assertEqual('Green', site2.availability_status)
    self.assertEqual(True, site2.is_open)
    self.assertEqual('1-2', site2.hours)
    # Delete the newly created site
    site2.delete()
    self.assertEqual(1, len(Site.all()))
def interestingLinks(language='es'):
    """Render the interesting-links page in the requested language."""
    page_title = splitByLanguage('Links Interesantes | Interesting Links',
                                 language)
    return render_template('interesting-links.html',
                           site=Site(language, page_title))
def form(language='es'):
    """Serve and process the add-item form.

    GET renders the empty form. POST uploads the obverse/reverse images to
    SmartFile, then appends the item's row to the Google Spreadsheet.
    Renders form-success.html or form-error.html accordingly.
    """
    site = Site(
        language,
        splitByLanguage(
            'Añadir un Objeto a la Colección | Add an Item to the Collection',
            language))
    collection = Collection(site)
    # Ensure the form is being sent.
    if request.method == 'POST':
        # Upload the images first to SmartFile and once we get the URLs for
        # the images we will update our Google Spreadsheet to include those
        # URLs.
        if not request.files['obverse'] or not request.files['reverse']:
            # Both images are mandatory.
            return render_template('form-error.html', site=site)
        else:
            # Builds the folder structure to store the image
            # (Items/<type>/<country>/<date>, using the English names).
            filePath = 'Items/' + splitByLanguage(
                request.form.get('type'), 'en') + '/' + splitByLanguage(
                    request.form.get('country'), 'en') + '/' + splitByLanguage(
                        request.form.get('date'), 'en')
            # Creates the folder if needed.
            collection.smartFileClient.put('/path/oper/mkdir/' + filePath)
            # Generates the file name based on the item name.
            fileName = stringToURL(
                splitByLanguage(request.form.get('name'), 'en'))
            # Updates the file name to be uploaded with the correct name.
            obverse = request.files['obverse']
            obverse.filename = fileName + '-obverse.jpg'
            reverse = request.files['reverse']
            reverse.filename = fileName + '-reverse.jpg'
            # Uploads the Obverse and Reverse images.
            collection.smartFileClient.post('/path/data/' + filePath,
                                            file=(obverse.filename, obverse))
            collection.smartFileClient.post('/path/data/' + filePath,
                                            file=(reverse.filename, reverse))
            # Generates the href for the images to be saved in the Google
            # Spreadsheet.
            obverseURL = collection.smartFileClient.post('/link',
                                                         path=filePath + '/' +
                                                         obverse.filename)
            reverseURL = collection.smartFileClient.post('/link',
                                                         path=filePath + '/' +
                                                         reverse.filename)
            if ('href' in obverseURL and 'href' in reverseURL):
                # Insert our data in the Google Spreadsheet.
                # NOTE(review): the column order below must match the
                # spreadsheet's header row — confirm before reordering.
                insertData = [
                    request.form.get('type'),
                    request.form.get('name'),
                    obverseURL['href'] + request.files['obverse'].filename,
                    reverseURL['href'] + request.files['reverse'].filename,
                    request.form.get('country'),
                    request.form.get('denomination'),
                    request.form.get('date'),
                    request.form.get('diameter'),
                    request.form.get('composition'),
                    request.form.get('series'),
                    request.form.get('serial'),
                    request.form.get('grading'),
                    request.form.get('value'),
                    request.form.get('cost'),
                    prepareItemLink(request.form.get('type'),
                                    request.form.get('country'),
                                    request.form.get('date'),
                                    request.form.get('name')),
                    request.form.get('mint'),
                ]
                rowCount = len(collection.googleData)
                # It is being inserted with +2 in the row Count to increase
                # the total +1, but also taking into account the header row.
                collection.googleSheet.insert_row(insertData, rowCount + 2)
                return render_template('form-success.html', site=site)
            else:
                # SmartFile did not return share links for both images.
                return render_template('form-error.html', site=site)
    return render_template('form.html', site=site)
from models.site import Site # we create an instance of site model and call its run method to run the whole project if __name__ == "__main__": site = Site() site.run()
def test_update_site(self):
    """Exercise the POST /site/{sitename} Update API end to end."""
    # Make note of the starting site count
    starting_sites = Site.all()
    # Create a Site manually first so that we isolate this test to only the Update API
    raw_site = Site()
    raw_site.name = 'tc_lambda_sites_test_update_site'
    raw_site.address = '123 tc_lambda_sites_test_update_site street'
    raw_site.availability_status = 'Green'
    raw_site.hours = '12-12'
    raw_site.is_open = True
    raw_site.save()
    # Sanity-check the fixture before exercising the API.
    self.assertEqual("123 tc_lambda_sites_test_update_site street",
                     raw_site.address)
    self.assertEqual('tc_lambda_sites_test_update_site', raw_site.name)
    self.assertEqual('Green', raw_site.availability_status)
    self.assertEqual(True, raw_site.is_open)
    self.assertEqual('12-12', raw_site.hours)

    # Simulated API Gateway event updating only the address field.
    event = {
        'httpMethod': 'POST',
        'path': '/site/tc_lambda_sites_test_update_site',
        'resource': '/site/{sitename}',
        'pathParameters': {
            'sitename': 'tc_lambda_sites_test_update_site'
        },
        'body': json.dumps({
            "Site": {
                "name": raw_site.name,
                "address": "123 tc_lambda_sites_test_update_site parkway",
                "hours": raw_site.hours,
                "is_open": raw_site.is_open,
                "availability_status": raw_site.availability_status
            }
        }),
    }
    response = LambdaApiHandler.site_apis(event)
    self.assertEqual('', response['body'])
    self.assertEqual('200', response['statusCode'])
    self.assertEqual(
        len(starting_sites) + 1, len(Site.all()),
        'Somehow the Update Site API created a new entry')
    # Fetch the site separately and validate the content
    site = Site.find('tc_lambda_sites_test_update_site')
    self.assertIsNotNone(site)
    self.assertEqual('tc_lambda_sites_test_update_site', site.name)
    self.assertEqual("123 tc_lambda_sites_test_update_site parkway",
                     site.address)
    self.assertEqual('Green', site.availability_status)
    self.assertEqual(True, site.is_open)
    self.assertEqual('12-12', site.hours)

    # Cleanup. FIX: compare to None with `is not None`, not `!=`.
    if raw_site is not None:
        raw_site.delete()
    # Cleanup validation
    self.assertEqual(len(starting_sites), len(Site.all()),
                     "Failed to clean up the site we created!")
# google_photo['photos'] ? # record['image'] = record['image'] = google_photo.get('photos', [ 'https://images.unsplash.com/photo-1531944213227-db53a6d0f3bd?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1941&q=80' ]) return record # https://maps.googleapis.com/maps/api/place/photo?photoreference=PHOTO_REFERENCE&sensor=false&maxheight=MAX_HEIGHT&maxwidth=MAX_WIDTH&key=YOUR_API_KEY filtered_with_photo = (list(map(google_photo_mapper, filtered_with_id))) # pprint.pprint(filtered_with_photo) sites_to_make = [] for site in filtered_with_photo: sites_to_make.append(Site(**site)) # alhambra = Site( # region="Europe and North America", # name="Alhambra, Generalife and Albayzín, Granada", # latitude=37.17667, # longitude=-3.59444, # country="Spain", # province="Province of Granada, Autonomous Community of Andalusia", # description="Rising above the modern lower town, the Alhambra and the Albaycín, situated on two adjacent hills, form the medieval part of Granada. To the east of the Alhambra fortress and residence are the magnificent gardens of the Generalife, the former rural residence of the emirs who ruled this part of Spain in the 13th and 14th centuries. The residential district of the Albaycín is a rich repository of Moorish vernacular architecture, into which the traditional Andalusian architecture blends harmoniously.", # thumbnail_id='9cf243e9fa6b0b572f0a4028e7a8fba7', # image='https://upload.wikimedia.org/wikipedia/commons/thumb/d/d8/Alhambradesdegeneralife.jpg/1024px-Alhambradesdegeneralife.jpg', # weblink='https://whc.unesco.org/en/list/314', # date_inscribed=1984, # user=balta
def _create_site(site_data, bidders_input):
    """Build a Site from its raw config entry and the bidder registry."""
    resolved_bidders = [bidders_input[bidder_name]
                        for bidder_name in site_data.bidders]
    return Site(site_data.name, resolved_bidders, site_data.floor)
def post(self, fields, **kwargs):
    """
    This endpoint signs in users with an apple_token field.

    Apple tokens sometimes contain emails (when it's a new user) and
    otherwise don't. "sub" is Apple's user PK. Name is passed if it's
    available and should be treated as optional.

    If the user's email exists, the existing user object will be returned.
    If the user's email does not exist, a new User will be saved.
    Either way, a new token will be issued.
    """
    try:
        # Validate the apple_token passed in (retrieves apple user)
        apple_user = retrieve_user(fields["apple_token"])
    except Exception as e:
        # Token validation failed; surface the reason to the client.
        return {"message": str(e)}, 400

    is_new_user = True
    if apple_user.full_user:
        # Make a new User
        user = User(email=apple_user.email,
                    name=fields["name"],
                    apple_id=apple_user.id)
        try:
            # Save the new user to the DB
            user.save()
        except IntegrityError:
            # That user already exists, rollback and load the existing row
            is_new_user = False
            db.session().rollback()
            user = User.query.filter_by(apple_id=apple_user.id).first()
    else:
        is_new_user = False
        user = User.query.filter_by(apple_id=apple_user.id).first()

    # # Mark previous tokens expired
    # TODO: This will expire shortcuts tokens which is bad
    # db.session.query(AuthToken).filter_by(
    #     user_id=user.id).update({AuthToken.expired: True})
    # db.session.commit()

    # Send back a new auth token
    new_token = AuthToken(user_id=user.id)
    new_token.save()

    return_payload = {"user": user.to_dict(), "token": new_token.to_dict()}
    status_code = 200

    # Create a user's first site
    if is_new_user:
        status_code = 201
        new_site = Site(user_id=user.id)
        new_site.set_first_handle(user.name)
        # BUG FIX: list(dict) yields only the dict's KEYS. Callers expect a
        # list of site dicts, matching the else-branch below.
        return_payload["sites"] = [new_site.to_dict()]
    else:
        # Look up existing sites
        sites = Site.query.filter_by(user_id=user.id)
        return_payload["sites"] = [s.to_dict() for s in sites]

    return return_payload, status_code