def process_url_to_scan(deployment, url_to_scan):
    logger = logging.getLogger(__name__)
    # Create the temp dir before the try block so the finally clause
    # can never hit an unbound tmp_dir.
    tmp_dir = tempfile.mkdtemp()
    try:
        logger.debug('Temp dir created, running headless-lighthouse')
        image = environment.get_env(environment.LIGHTHOUSE_IMAGE)
        output = process.run_with_output(f'docker run -e URL={url_to_scan} '
                                         f'-v {tmp_dir}:/report '
                                         f'{image}')
        logger.debug('Output from lighthouse was: "%s"', output)
        app_name = deployment_util.get_application_name(deployment)
        commit = deployment_util.get_application_version(deployment)
        commit = commit.split('_')[1]
        url_path = urlparse(url_to_scan).path.replace('/', '-')
        report_path = f'{tmp_dir}/{app_name}_{commit}_{url_path}'
        os.rename(f'{tmp_dir}/report.report.html', f'{report_path}.html')
        os.rename(f'{tmp_dir}/report.report.json', f'{report_path}.json')
        logger.debug('Report path is %s.html', report_path)
        #box_link = upload_to_box(report_path, deployment)
        if environment.get_env(environment.LIGHTHOUSE_STORAGE_CONN_STRING):
            upload_to_storage(deployment, report_path, url_path)
        send_info_to_slack(deployment, url_to_scan)
    finally:
        if os.path.exists(tmp_dir) and os.path.isdir(tmp_dir):
            shutil.rmtree(tmp_dir)
class TestSchemaValidation(unittest.TestCase):

    @unittest.skipIf(environment.get_env(environment.SKIP_VALIDATION_TESTS),
                     'SKIP_VALIDATION_TESTS set')
    def test_validate_deployment_samples(self):
        furano_url = environment.get_env_with_default_value(
            environment.VALIDATE_DEPLOYMENT_URL,
            'https://app.kth.se/jsonschema/dizin/deployment')
        for sample in mock_data.get_deployment_samples():
            del sample["expected-enriched-values"]
            result = requests.post(furano_url, json=sample, allow_redirects=False)
            self.assertEqual(result.json(), {})
            self.assertEqual(result.status_code, 200)

    @unittest.skipIf(environment.get_env(environment.SKIP_VALIDATION_TESTS),
                     'SKIP_VALIDATION_TESTS set')
    def test_validate_deployment_samples_enriched(self):
        furano_url = environment.get_env_with_default_value(
            environment.VALIDATE_DEPLOYMENT_URL,
            'https://app.kth.se/jsonschema/alvares/deployment')
        for sample in mock_data.get_deployment_samples():
            sample = deployment_enricher.enrich(sample)
            del sample["expected-enriched-values"]
            result = requests.post(furano_url, json=sample, allow_redirects=False)
            self.assertEqual(result.json(), {})
            self.assertEqual(result.status_code, 200)

    @unittest.skipIf(environment.get_env(environment.SKIP_VALIDATION_TESTS),
                     'SKIP_VALIDATION_TESTS set')
    def test_validate_error(self):
        validation_url = environment.get_env_with_default_value(
            environment.VALIDATE_DEPLOYMENT_URL,
            'https://app.kth.se/jsonschema/dizin/error'
        )
        deployment_json = mock_data.get_error()
        result = requests.post(validation_url, json=deployment_json, allow_redirects=False)
        self.assertEqual(result.json(), {})
        self.assertEqual(result.status_code, 200)

    @unittest.skipIf(environment.get_env(environment.SKIP_VALIDATION_TESTS),
                     'SKIP_VALIDATION_TESTS set')
    def test_validate_recommendation(self):
        validation_url = environment.get_env_with_default_value(
            environment.VALIDATE_DEPLOYMENT_URL,
            'https://app.kth.se/jsonschema/dizin/recommendation'
        )
        for sample in mock_data.get_recommendation_samples():
            del sample["expected-enriched-values"]
            result = requests.post(validation_url, json=sample, allow_redirects=False)
            self.assertEqual(result.json(), {})
            self.assertEqual(result.status_code, 200)
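# The four tests above post samples to live schema endpoints. To skip them in
# an offline run, set the same flag the skipIf guards read (example
# invocation only):
#
#   SKIP_VALIDATION_TESTS=True python -m unittest discover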
def get_headers():
    api_key = environment.get_env(environment.FLOTTSBRO_API_KEY)
    if not api_key:
        LOG.error('No FLOTTSBRO_API_KEY env specified')
        return None
    return {'api_key': api_key}
def handle_error(error):
    global LOG  # pylint: disable=W0603
    web_hook = environment.get_env(environment.SLACK_WEB_HOOK)
    overridden = send_to_override(web_hook, error)
    if not overridden:
        error_has_channels = send_to_error_channels(web_hook, error)
        if not error_has_channels:
            send_to_environment_channels(web_hook, error)
def should_monitor_cluster(deployment):
    global LOG  # pylint: disable=W0603
    cluster_ok = (deployment_util.get_cluster(deployment)
                  in environment.get_env(environment.UTR_CLUSTERS))
    if not cluster_ok:
        LOG.debug('Cluster "%s" not in UTR_CLUSTERS, skipping UpTimeRobot integration',
                  deployment_util.get_cluster(deployment))
    return cluster_ok
def upload_to_box(report_path, deployment):
    box_auth_string = environment.get_env(environment.BOX_AUTH_JSON)
    box_auth_json = json.loads(box_auth_string.replace("'", ""))
    box_sdk = JWTAuth.from_settings_dictionary(box_auth_json)
    client = BoxClient(box_sdk)
    file_name = create_file_name(deployment)
    # Folder id 0 is the root folder
    box_file = client.folder('0').upload(report_path, file_name)
    return box_file.get_shared_link(access='open')
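# create_file_name is used above and in get_payload below but is not defined
# in this section. A minimal sketch, assuming it combines the deployment_util
# helpers the rest of this file already relies on; the exact naming scheme is
# an assumption.
def create_file_name_sketch(deployment):
    app_name = deployment_util.get_application_name(deployment)
    version = deployment_util.get_application_version(deployment)
    # Hypothetical format: "<app>_<version>.html"
    return f'{app_name}_{version}.html'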
def app_is_excluded(deployment):
    global LOG  # pylint: disable=W0603
    app_excluded = (deployment_util.get_application_name(deployment)
                    in environment.get_env(environment.UTR_EXCLUDED_APPS))
    if app_excluded:
        LOG.debug('Application "%s" in UTR_EXCLUDED_APPS, '
                  'skipping UpTimeRobot integration',
                  deployment_util.get_application_name(deployment))
    return app_excluded
def call_endpoint(api_url, payload):
    global LOG  # pylint: disable=W0603
    if api_url.startswith('/'):
        base_url = environment.get_env_with_default_value(
            environment.UTR_API_BASE_URL, 'https://api.uptimerobot.com/v2')
        api_url = f'{base_url}{api_url}'
    LOG.debug('Calling endpoint "%s" with payload "%s"', api_url, payload)
    try:
        api_key = environment.get_env(environment.UTR_API_KEY)
        payload['api_key'] = api_key
        response = requests.post(api_url, data=payload)
        LOG.debug('Called url "%s", response was "%s"', api_url, response.text)
        return response.json()
    except (HTTPError, ConnectTimeout, RequestException) as request_ex:
        LOG.error('Could not call UpTimeRobot endpoint: "%s"', request_ex)
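# Example usage of call_endpoint against UpTimeRobot's v2 API. The relative
# path is expanded with UTR_API_BASE_URL and the api_key is injected by
# call_endpoint itself; getMonitors is a real v2 endpoint, but this search
# filter is only an illustration.
def example_lookup_monitor(friendly_name):
    return call_endpoint('/getMonitors', {'search': friendly_name})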
def handle_deployment(deployment):
    logger = logging.getLogger(__name__)
    if deployment_util.has_zero_replicas(deployment):
        logger.debug('Skipping testAccessibility, no replicas')
        return deployment
    if not deployment_util.get_test_accessibility(deployment):
        logger.debug('testAccessibility not set - skipping Lighthouse')
        return deployment
    if not environment.get_env(environment.SLACK_TOKEN):
        logger.info("No SLACK_TOKEN env provided. Can't run Lighthouse")
        return deployment
    urls_to_scan = get_urls_to_scan(deployment)
    if urls_to_scan:
        for scan_url in urls_to_scan:
            process_url_to_scan(deployment, scan_url)
    return deployment
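# get_urls_to_scan is not shown in this section. A minimal sketch, assuming
# the deployment document carries a public application URL plus an optional
# list of paths to scan; both field names below are hypothetical.
def get_urls_to_scan_sketch(deployment):
    base_url = deployment.get('applicationUrl')  # hypothetical field
    if not base_url:
        return []
    paths = deployment.get('testAccessibilityPaths', ['/'])  # hypothetical field
    return [f'{base_url.rstrip("/")}{path}' for path in paths]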
def get_payload(channel, deployment, report_path, scanned_url):
    slack_token = environment.get_env(environment.SLACK_TOKEN)
    return {
        'filename': create_file_name(deployment),
        'token': slack_token,
        'username': '******',
        'channels': channel,
        'filetype': 'binary',
        'initial_comment': ('Google Lighthouse accessibility report '
                            f'{deployment_util.get_friendly_name(deployment)}.'),
        # The original mixed an f-string with .format(), so the score always
        # rendered as 0.00; format the parsed score directly instead.
        'title': f'Score for {scanned_url}: {parse_total_score(report_path):.2f}/4.0'
    }
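# parse_total_score is referenced in get_payload above but defined elsewhere.
# A minimal sketch, assuming it is handed the json report and that the "/4.0"
# in the title means four Lighthouse category scores (each stored in the 0-1
# range in the report) are summed; the aggregation is an assumption.
def parse_total_score_sketch(json_report_path):
    with open(json_report_path, encoding='utf-8') as report_file:
        report = json.load(report_file)
    # Lighthouse keeps per-category scores under "categories" in the report.
    return sum(category.get('score') or 0
               for category in report.get('categories', {}).values())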
def upload_to_storage(deployment, report_path, url_path):
    logger = logging.getLogger(__name__)
    logger.info('Lighthouse connection string found, uploading report')
    connect_str = environment.get_env(
        environment.LIGHTHOUSE_STORAGE_CONN_STRING)
    client = BlobServiceClient.from_connection_string(connect_str)
    container = 'no team'
    if 'team' in deployment:
        container = deployment['team']
    try:
        logger.debug('Using container "%s"', container)
        client.create_container(container)
    except Exception:
        logger.debug('Container already exists')
    clean_old_blobs(deployment, client, container, url_path)
    # Upload the html and json reports with the same settings.
    for path in (f'{report_path}.html', f'{report_path}.json'):
        filename = os.path.basename(path)
        logger.debug('Generated filename "%s"', filename)
        blob_properties = get_blob_properties(filename)
        blob_client = client.get_blob_client(container=container,
                                             blob=blob_properties)
        with open(path, 'rb') as data:
            try:
                blob_client.upload_blob(data)
                blob_client.set_http_headers(
                    content_settings=blob_properties.content_settings)
            except Exception:
                logger.debug("Couldn't upload file. Does it already exist?")
    logger.info('Report upload complete')
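# get_blob_properties is referenced above but not defined in this section. A
# minimal sketch, assuming it wraps the filename together with Azure
# ContentSettings so the report is served with a sensible Content-Type;
# BlobProperties and ContentSettings are real azure-storage-blob types, but
# this particular construction is an assumption.
def get_blob_properties_sketch(filename):
    from azure.storage.blob import BlobProperties, ContentSettings
    content_type = 'text/html' if filename.endswith('.html') else 'application/json'
    properties = BlobProperties()
    properties.name = filename
    properties.content_settings = ContentSettings(content_type=content_type)
    return properties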
def send_file_to_slack(channel, deployment, report_path, scanned_url):
    logger = logging.getLogger(__name__)
    logger.debug('Starting upload of lighthouse report to Slack')
    api_base_url = environment.get_env(environment.SLACK_API_BASE_URL)
    api_url = f'{api_base_url}/files.upload'
    #headers = {'Content-type': 'multipart/form-data'}
    headers = {}
    payload = get_payload(channel, deployment, report_path, scanned_url)
    logger.debug('File upload payload is: "%s"', payload)
    try:
        # Close the file handle deterministically instead of relying on GC.
        with open(report_path, 'rb') as report_file:
            files = {'file': (report_path, report_file, 'binary')}
            logger.debug('File data is: "%s"', files)
            logger.debug('Calling Slack with payload "%s"', payload)
            response = requests.post(api_url, files=files, data=payload,
                                     headers=headers)
        logger.debug('Response was "%s"', response.text)
    except (HTTPError, ConnectTimeout, RequestException) as request_ex:
        logger.error(
            'Could not send slack lighthouse notification to channel "%s": "%s"',
            channel, request_ex)
def send_recommendation_to_slack(channel, payload):
    logger = logging.getLogger(__name__)
    logger.info('Sending recommendation to channel "%s"', channel)
    web_hook = environment.get_env(environment.SLACK_WEB_HOOK)
    slack_util.call_slack_endpoint(channel, web_hook, payload)
def send_to_override(web_hook, error):
    channel_override = environment.get_env(environment.SLACK_CHANNEL_OVERRIDE)
    if channel_override:
        send_error_to_slack(web_hook, channel_override, error)
        return error
    return None
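# send_error_to_slack is called above but not defined in this section. Given
# how the other senders in this file wrap slack_util.call_slack_endpoint, a
# minimal sketch might look like this; the payload shape is an assumption.
def send_error_to_slack_sketch(web_hook, channel, error):
    payload = {'text': str(error)}  # hypothetical payload shape
    slack_util.call_slack_endpoint(channel, web_hook, payload)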
def send_payload(channel, payload):
    web_hook = environment.get_env(environment.SLACK_WEB_HOOK)
    slack_util.call_slack_endpoint(channel, web_hook, payload)
def handle_deployment(deployment):
    global LOG  # pylint: disable=W0603
    web_hook = environment.get_env(environment.SLACK_WEB_HOOK)
    for channel in slack_util.get_deployment_channels(deployment):
        send_deployment_to_slack(web_hook, channel, deployment)
    return deployment