def check_db_connection(self):
    self.console.info("Checking connection to MongoDB...")
    try:
        self.db_client.server_info()
    except ServerSelectionTimeoutError:
        self.console.error(
            "Unable to connect to MongoDB! Please check if it is running and "
            "reachable on {}:{}. Aborting..."
            .format(db_config.get("host"), db_config.get("port")))
        raise ConnectionError('cannot connect to MongoDB.')
def __init__(self, msf_handler):
    self.msf_handler = msf_handler
    self.console = ConsoleHandler("DBHandler")
    self.exploits = []
    self.db_client = MongoClient(db_config.get("host"), db_config.get("port"))
    self.db = self.db_client.pyperpwn
    self.expl_coll = self.db.exploits
    self.cve_coll = self.db.cve
    self.vuln_coll = self.db.vulns
    self.expl_class_coll = self.db.classifications
    self.exec_status_coll = self.db.exec_status
    self.check_db_connection()
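For orientation, here is a minimal sketch of the db_config mapping these pyperpwn methods assume. Only the keys are taken from the snippets (host and port above, diff_range in check_db below); the concrete values are invented.

# Hypothetical values, for illustration only.
db_config = {
    "host": "127.0.0.1",   # MongoDB host passed to MongoClient(...)
    "port": 27017,         # MongoDB port
    "diff_range": 50,      # tolerated exploit-count gap, used in check_db()
}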
def can_user_access_testcase(testcase):
  """Checks if the current user can access the testcase."""
  if has_access(
      fuzzer_name=testcase.fuzzer_name,
      job_type=testcase.job_type,
      need_privileged_access=testcase.security_flag):
    return True

  user_email = helpers.get_user_email()
  if testcase.uploader_email and testcase.uploader_email == user_email:
    return True

  # Allow owners of bugs to see associated test cases and test case groups.
  issue_id = testcase.bug_information or testcase.group_bug_information
  if not issue_id:
    return False

  itm = issue_tracker_utils.get_issue_tracker_manager(testcase)
  issue = itm.get_issue(int(issue_id))
  if not issue:
    return False

  config = db_config.get()
  if config.relax_testcase_restrictions or _is_domain_allowed(user_email):
    return (any(utils.emails_equal(user_email, cc) for cc in issue.cc) or
            utils.emails_equal(user_email, issue.owner) or
            utils.emails_equal(user_email, issue.reporter))

  return utils.emails_equal(user_email, issue.owner)
async def setup_db(self):
    database = db_config.get('database')
    db_name = self.request.headers.get('X-Company-Code', '').lower()
    # Compatible with the mode where the database is specified in config:
    # only switch to a per-company database when 'database' is empty.
    if (not database) and db_name:
        self.env.db.pool = await self.env.db.create_pool(database=db_name)
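A small hedged illustration of the header-driven switch above (all values invented): the per-company database only takes effect when the static config leaves 'database' empty.

db_config = {'database': ''}            # empty -> per-company switching active
headers = {'X-Company-Code': 'ACME'}    # invented request header
db_name = headers.get('X-Company-Code', '').lower()
if not db_config.get('database') and db_name:
    print('would create pool for database:', db_name)  # -> 'acme'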
def check_db(self):
    db_count = self.expl_coll.count_documents({})
    msf_count = len(self.msf_handler.get_all_exploits())
    self.console.info(
        "\t Found {} exploits in DB, while MSF currently offers {} in total"
        .format(db_count, msf_count))
    if db_count + db_config.get("diff_range") >= msf_count:
        return True
    return False
def _create_client(self):
  """Return a client object for querying the issue tracker."""
  config = db_config.get()
  credentials = json.loads(config.jira_credentials)
  jira_url = config.jira_url
  jira_client = jira.JIRA(
      jira_url, auth=(credentials['username'], credentials['password']))
  return jira_client
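A hedged usage sketch, not from the original source: the returned object is a standard jira-python client, so fetching a single issue could look like this. The owner instance name and the issue key are invented.

client = policy._create_client()   # 'policy' is a hypothetical owner instance
issue = client.issue('PROJ-123')   # standard jira-python call; key is invented
print(issue.fields.summary)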
def configure(force_enable=False):
  """Configure airplane mode and wifi on device."""
  # The reproduce tool shouldn't inherit wifi settings from jobs.
  if environment.get_value('REPRODUCE_TOOL'):
    return

  # Airplane mode should be disabled in all cases. This can get inadvertently
  # turned on via gestures.
  disable_airplane_mode()

  # Need to disable wifi before changing configuration.
  disable()

  # Check if wifi needs to be enabled. If not, then no need to modify the
  # supplicant file.
  wifi_enabled = force_enable or environment.get_value('WIFI', True)
  if not wifi_enabled:
    # No more work to do, we already disabled it at start.
    return

  # Wait 2 seconds to allow the wifi to be enabled.
  enable()
  time.sleep(2)

  # Install helper apk to configure wifi.
  wifi_util_apk_path = os.path.join(
      environment.get_platform_resources_directory(), 'wifi_util.apk')
  if not app.is_installed(WIFI_UTIL_PACKAGE_NAME):
    app.install(wifi_util_apk_path)

  # Get ssid and password from admin configuration.
  if adb.is_gce():
    wifi_ssid = 'VirtWifi'
    wifi_password = ''
  else:
    config = db_config.get()
    if not config.wifi_ssid:
      logs.log('No wifi ssid is set, skipping wifi config.')
      return
    wifi_ssid = config.wifi_ssid
    wifi_password = config.wifi_password or ''

  connect_wifi_command = (
      'am instrument -e method connectToNetwork -e ssid {ssid} ')
  if wifi_password:
    connect_wifi_command += '-e psk {password} '
  connect_wifi_command += '-w {call_path}'

  output = adb.run_shell_command(
      connect_wifi_command.format(
          ssid=quote(wifi_ssid),
          password=quote(wifi_password),
          call_path=WIFI_UTIL_CALL_PATH))
  if 'result=true' not in output:
    logs.log_error('Failed to connect to wifi.', output=output)
def create_db_session():
    """Generate a SQLAlchemy session object."""
    try:
        from config import db_config
        db_url = URL(drivername='mysql+mysqlconnector',
                     username=db_config.get('user'),
                     password=db_config.get('password'),
                     host=db_config.get('host'),
                     port=db_config.get('port'),
                     database=db_config.get('name'))
    except ImportError:
        db_url = 'mysql+mysqlconnector://ispyb:[email protected]:3306/ispyb'

    print("Creating db connection to {}".format(db_url))
    engine = create_engine(db_url)
    Session = sessionmaker(bind=engine)
    session = Session()
    return session
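A hedged usage example, not part of the original: exercising the returned SQLAlchemy session with a trivial statement and closing it. This assumes the configured MySQL database is reachable.

from sqlalchemy import text

session = create_db_session()
result = session.execute(text('SELECT 1')).scalar()  # sanity-check the connection
print('db reachable, SELECT 1 ->', result)
session.close()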
def configure_wifi_and_airplane_mode(wifi_enabled=False):
  """Configure airplane mode and wifi on device."""
  # Airplane mode should be disabled in all cases. This can get inadvertently
  # turned on via gestures.
  adb.disable_airplane_mode()

  # GCE uses Ethernet, nothing to do here.
  if adb.is_gce():
    return

  # Need to disable wifi before changing configuration.
  adb.disable_wifi()

  # Check if wifi needs to be enabled. If not, then no need to modify the
  # supplicant file.
  wifi_enabled = wifi_enabled or environment.get_value('WIFI', True)
  if not wifi_enabled:
    # No more work to do, we already disabled it at start.
    return

  config = db_config.get()
  if not config.wifi_ssid:
    # No wifi config is set, skip.
    return

  adb.enable_wifi()

  # Wait 2 seconds to allow the wifi to be enabled.
  time.sleep(2)

  wifi_util_apk_path = os.path.join(
      environment.get_platform_resources_directory(), 'wifi_util.apk')
  if not adb.is_package_installed(WIFI_UTIL_PACKAGE_NAME):
    adb.install_package(wifi_util_apk_path)

  connect_wifi_command = (
      'am instrument -e method connectToNetwork -e ssid {ssid} ')
  if config.wifi_password:
    connect_wifi_command += '-e psk {password} '
  connect_wifi_command += '-w {call_path}'

  output = adb.run_adb_shell_command(
      connect_wifi_command.format(
          ssid=quote(config.wifi_ssid),
          password=quote(config.wifi_password),
          call_path=WIFI_UTIL_CALL_PATH))
  if 'result=true' not in output:
    logs.log_error('Failed to connect to wifi.', output=output)
def can_user_access_testcase(testcase):
  """Checks if the current user can access the testcase."""
  config = db_config.get()
  need_privileged_access = (
      testcase.security_flag and not config.relax_security_bug_restrictions)

  if has_access(
      fuzzer_name=testcase.fuzzer_name,
      job_type=testcase.job_type,
      need_privileged_access=need_privileged_access):
    return True

  user_email = helpers.get_user_email()
  if testcase.uploader_email and testcase.uploader_email == user_email:
    return True

  # Allow owners of bugs to see associated test cases and test case groups.
  issue_id = testcase.bug_information or testcase.group_bug_information
  if not issue_id:
    return False

  itm = issue_tracker_utils.get_issue_tracker_manager(testcase)
  issue_id = int(issue_id)
  associated_issue = itm.get_issue(issue_id)
  if not associated_issue:
    return False

  # Look at both associated issue and original issue (if the associated one
  # is a duplicate of the original issue).
  issues_to_check = [associated_issue]
  if associated_issue.merged_into:
    original_issue = itm.get_original_issue(issue_id)
    if original_issue:
      issues_to_check.append(original_issue)

  relaxed_restrictions = (
      config.relax_testcase_restrictions or _is_domain_allowed(user_email))
  for issue in issues_to_check:
    if relaxed_restrictions:
      if (any(utils.emails_equal(user_email, cc) for cc in issue.cc) or
          utils.emails_equal(user_email, issue.owner) or
          utils.emails_equal(user_email, issue.reporter)):
        return True
    elif utils.emails_equal(user_email, issue.owner):
      return True

  return False
def add_test_accounts_if_needed():
  """Add test account to work with GmsCore, etc."""
  last_test_account_check_time = persistent_cache.get_value(
      constants.LAST_TEST_ACCOUNT_CHECK_KEY,
      constructor=datetime.datetime.utcfromtimestamp)
  needs_test_account_update = (
      last_test_account_check_time is None or dates.time_has_expired(
          last_test_account_check_time,
          seconds=ADD_TEST_ACCOUNT_CHECK_INTERVAL))
  if not needs_test_account_update:
    return

  config = db_config.get()
  if not config:
    return

  test_account_email = config.test_account_email
  test_account_password = config.test_account_password
  if not test_account_email or not test_account_password:
    return

  adb.run_as_root()
  wifi.configure(force_enable=True)

  if not app.is_installed(ADD_TEST_ACCOUNT_PKG_NAME):
    logs.log('Installing helper apk for adding test account.')
    android_directory = environment.get_platform_resources_directory()
    add_test_account_apk_path = os.path.join(android_directory,
                                             ADD_TEST_ACCOUNT_APK_NAME)
    app.install(add_test_account_apk_path)

  logs.log('Trying to add test account.')
  output = adb.run_shell_command(
      'am instrument -e account %s -e password %s -w %s' %
      (test_account_email, test_account_password,
       ADD_TEST_ACCOUNT_CALL_PATH),
      timeout=ADD_TEST_ACCOUNT_TIMEOUT)
  if not output or test_account_email not in output:
    logs.log('Failed to add test account, probably due to wifi issues.')
    return

  logs.log('Test account added successfully.')
  persistent_cache.set_value(constants.LAST_TEST_ACCOUNT_CHECK_KEY,
                             time.time())
def sync_cf_revision_mappings(project, info):
  """Sync ClusterFuzz revision mappings."""
  config = db_config.get()

  # Parse existing values.
  revision_var_urls = {}
  for line in config.revision_vars_url.splitlines():
    job, vars_url = line.split(';')
    revision_var_urls[job] = vars_url

  for template in get_jobs_for_project(project, info):
    job_name = template.job_name(project)
    revision_var_urls[job_name] = REVISION_URL.format(
        project=project,
        bucket=_get_build_bucket_for_engine(template.engine),
        sanitizer=template.memory_tool)

  config.revision_vars_url = '\n'.join(
      '%s;%s' % (key_value, vars_url)
      for key_value, vars_url in revision_var_urls.iteritems())
  config.put()
def _sync_revision_mappings(self, project, info):
  """Sync ClusterFuzz revision mappings."""
  config = db_config.get()

  # Parse existing values.
  revision_var_urls = {}
  for line in config.revision_vars_url.splitlines():
    job, vars_url = line.split(';')
    revision_var_urls[job] = vars_url

  for template in get_jobs_for_project(project, info):
    job_name = template.job_name(project)
    revision_var_urls[job_name] = self._revision_url_template.format(
        project=project,
        bucket=self._get_build_bucket(template.engine, template.architecture),
        sanitizer=template.memory_tool)

  config.revision_vars_url = '\n'.join(
      '%s;%s' % (key_value, vars_url)
      for key_value, vars_url in six.iteritems(revision_var_urls))
  config.put()
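Both sync variants above store the job-to-URL mapping as one 'job;url' pair per line in config.revision_vars_url. A standalone sketch of that round-trip (the job name and URL are invented):

serialized = 'libfuzzer_asan_lib1;https://example.com/lib1-address-%s.srcmap.json'

# Parse, exactly as the loops above do.
revision_var_urls = dict(line.split(';') for line in serialized.splitlines())

# Re-serialize; the format is stable under a parse/serialize cycle.
assert '\n'.join(
    '%s;%s' % (job, url) for job, url in revision_var_urls.items()) == serialized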
def __init__(self, loop=None, db_pool=None, redis_pool=None,
             redis_cache_pool=None):
    self.company_code = base_config.get('company_code', '')
    self.db = PostgresDb(db_pool=db_pool)
    self.redis_pool = redis_pool
    self.redis_cache_pool = redis_cache_pool
    self.lang = base_config.get('lang')
    self.currency_symbol = {}
    self.loop = loop
    # For compatibility with the Pufa project: allow sharing the
    # authentication redis with center.
    if not self.company_code:
        key_template = '{}'
    else:
        key_template = self.company_code + 'oe-session:{}'
    self.session_mgr = SessionManager(db_config.get('database'),
                                      self.redis_pool,
                                      key_template=key_template)
def get(self):
  """Handle a get request."""
  external_user_permissions = list(
      data_types.ExternalUserPermission.query().order(
          data_types.ExternalUserPermission.entity_kind,
          data_types.ExternalUserPermission.entity_name,
          data_types.ExternalUserPermission.email))

  template_values = {
      'config': db_config.get(),
      'permissions': external_user_permissions,
      'fieldValues': {
          'csrf_token': form.generate_csrf_token(),
          'user_permission_entity_kinds': USER_PERMISSION_ENTITY_KINDS,
          'user_permission_auto_cc_types': USER_PERMISSION_AUTO_CC_TYPES,
          'add_permission_url': '/add-external-user-permission',
          'delete_permission_url': '/delete-external-user-permission',
      }
  }

  helpers.log('Configuration', helpers.VIEW_OPERATION)
  self.render('configuration.html', template_values)
def get(self):
  """Handle a get request."""
  external_user_permissions = list(
      data_types.ExternalUserPermission.query().order(
          data_types.ExternalUserPermission.entity_kind,
          data_types.ExternalUserPermission.entity_name,
          data_types.ExternalUserPermission.email,
      ))

  template_values = {
      "config": db_config.get(),
      "permissions": external_user_permissions,
      "fieldValues": {
          "csrf_token": form.generate_csrf_token(),
          "user_permission_entity_kinds": USER_PERMISSION_ENTITY_KINDS,
          "user_permission_auto_cc_types": USER_PERMISSION_AUTO_CC_TYPES,
          "add_permission_url": "/add-external-user-permission",
          "delete_permission_url": "/delete-external-user-permission",
      },
  }

  helpers.log("Configuration", helpers.VIEW_OPERATION)
  self.render("configuration.html", template_values)
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page."""
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  reproduction_help_url = data_handler.get_reproduction_help_url(
      testcase, config)
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())

  if not testcase.regression:
    regression = 'Pending'
  elif testcase.regression == 'NA':
    regression = 'NA'
  else:
    regression = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.regression)

  fixed_full = None
  if 'progression_pending' in metadata:
    fixed = 'Pending'
  elif not testcase.fixed:
    fixed = 'NO'
  elif testcase.fixed == 'NA':
    fixed = 'NA'
  elif testcase.fixed == 'Yes':
    fixed = 'YES'
  else:
    fixed = 'YES'
    fixed_full = _get_revision_range_html_from_string(testcase.job_type,
                                                      testcase.fixed)

  last_tested = None
  last_tested_revision = (
      metadata.get('last_tested_revision') or testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(testcase.job_type,
                                           last_tested_revision)

  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       crash_revisions_dict)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)
  crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

  second_crash_stacktrace_revision = metadata.get(
      'second_crash_stacktrace_revision')
  second_crash_stacktrace_revisions_dict = (
      revisions.get_component_revisions_dict(second_crash_stacktrace_revision,
                                             testcase.job_type))
  second_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='second_crash_stacktrace')
  second_crash_stacktrace = filter_stacktrace(
      second_crash_stacktrace, testcase.crash_type,
      second_crash_stacktrace_revisions_dict)
  second_crash_stacktrace = convert_to_lines(second_crash_stacktrace,
                                             crash_state_lines, crash_type)
  second_crash_stacktrace_preview_lines = _preview_stacktrace(
      second_crash_stacktrace)

  last_tested_crash_revision = metadata.get('last_tested_crash_revision')
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='last_tested_crash_stacktrace')
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace, testcase.crash_type,
      last_tested_crash_revisions_dict)
  last_tested_crash_stacktrace = convert_to_lines(last_tested_crash_stacktrace,
                                                  crash_state_lines,
                                                  crash_type)
  last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
      last_tested_crash_stacktrace)

  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  if 'build_url' in metadata:
    metadata['build_url'] = metadata['build_url'].replace(
        'gs://', 'https://storage.cloud.google.com/')

  pending_blame_task = (
      testcase.has_blame() and 'blame_pending' in metadata and
      metadata['blame_pending'])
  pending_impact_task = (
      testcase.has_impacts() and not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ('progression_pending' in metadata and
                              metadata['progression_pending'])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
  needs_refresh = (
      testcase.status == 'Pending' or
      ((testcase.status == 'Processed' or testcase.status == 'Duplicate') and
       (pending_blame_task or pending_impact_task or pending_minimize_task or
        pending_progression_task or pending_regression_task or
        pending_stack_task)))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = severity_analyzer.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None
  if testcase.one_time_crasher_flag:
    last_crash_time = (
        crash_stats.get_last_crash_time(testcase) or testcase.timestamp)

    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(':')[0]
  memory_tool_display_value = memory_tool_display_string.split(':')[1].strip()

  helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      'id': testcase.key.id(),
      'crash_type': crash_type,
      'crash_address': crash_address,
      'crash_state': crash_state,  # Used by reproduce tool.
      'crash_state_lines': crash_state_lines,
      'crash_revision': testcase.crash_revision,
      'csrf_token': form.generate_csrf_token(),
      'external_user': external_user,
      'footer': testcase.comments,
      'fixed': fixed,
      'fixed_full': fixed_full,
      'issue_url': issue_url,
      'is_admin': auth.is_current_user_admin(),
      'metadata': metadata,
      'minimized_testcase_size': minimized_testcase_size,
      'needs_refresh': needs_refresh,
      'original_testcase_size': original_testcase_size,
      'privileged_user': privileged_user,
      'regression': regression,
      'crash_stacktrace': {
          'lines': crash_stacktrace,
          'preview_lines': crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              crash_revision, testcase.job_type, display=True)
      },
      'second_crash_stacktrace': {
          'lines': second_crash_stacktrace,
          'preview_lines': second_crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              second_crash_stacktrace_revision, testcase.job_type,
              display=True)
      },
      'last_tested_crash_stacktrace': {
          'lines': last_tested_crash_stacktrace,
          'preview_lines': last_tested_crash_stacktrace_preview_lines,
          'revision': revisions.get_real_revision(
              last_tested_crash_revision, testcase.job_type, display=True)
      },
      'security_severity': security_severity,
      'security_severities': data_types.SecuritySeverity.list(),
      'stats': {
          'min_hour': crash_stats.get_min_hour(),
          'max_hour': crash_stats.get_max_hour(),
      },
      'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
      'testcase': testcase,
      'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
      'show_blame': testcase.has_blame(),
      'show_impact': testcase.has_impacts(),
      'impacts_production': testcase.impacts_production(),
      'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
      'auto_delete_timestamp': auto_delete_timestamp,
      'auto_close_timestamp': auto_close_timestamp,
      'memory_tool_display_label': memory_tool_display_label,
      'memory_tool_display_value': memory_tool_display_value,
      'last_tested': last_tested,
      'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
      'has_issue_tracker': has_issue_tracker,
      'reproduction_help_url': reproduction_help_url,
      'is_local_development':
          environment.is_running_on_app_engine_development(),
  }
def get_component_revisions_dict(revision, job_type):
  """Retrieve revision vars dict."""
  if revision == 0 or revision == '0' or revision is None:
    # Return empty dict for zero start revision.
    return {}

  config = db_config.get()
  revision_info_url_format = db_config.get_value_for_job(
      config.revision_vars_url, job_type)
  if not revision_info_url_format:
    return None

  project_name = data_handler.get_project_name(job_type)
  revisions_dict = {}

  if utils.is_chromium():
    component = data_handler.get_component_name(job_type)
    repository = data_handler.get_repository_for_component(component)
    if repository and not _is_clank(revision_info_url_format):
      revision_hash = _git_commit_position_to_git_hash_for_chromium(
          revision, repository)
      if revision_hash is None:
        return None

      # FIXME: While we check for this explicitly appended component in all
      # applicable cases that we know of within this codebase, if the dict
      # is shared with an external service (e.g. Predator) we may need to
      # clean this up beforehand.
      revisions_dict['/src'] = {
          'name': _get_component_display_name(component, project_name),
          'url': _git_url_for_chromium_repository(repository),
          'rev': revision_hash,
          'commit_pos': revision
      }

      # Use revision hash for info url later.
      revision = revision_hash

  revision_info_url = revision_info_url_format % revision
  url_content = _get_url_content(revision_info_url)
  if not url_content:
    logs.log_error('Failed to get component revisions from %s.' %
                   revision_info_url)
    return None

  # Parse as per DEPS format.
  if _is_deps(revision_info_url):
    deps_revisions_dict = deps_to_revisions_dict(url_content)
    if not deps_revisions_dict:
      return None

    revisions_dict.update(deps_revisions_dict)
    return revisions_dict

  # Parse as per Clank DEPS format.
  if _is_clank(revision_info_url):
    return _clank_revision_file_to_revisions_dict(url_content)

  # Default case: parse content as yaml.
  revisions_dict = _to_dict(url_content)
  if not revisions_dict:
    logs.log_error('Failed to parse component revisions from %s.' %
                   revision_info_url)
    return None

  # Parse as per source map format.
  if revision_info_url.endswith(SOURCE_MAP_EXTENSION):
    revisions_dict = _src_map_to_revisions_dict(revisions_dict, project_name)

  return revisions_dict
def post(self):
  """Handle a post request."""
  config = db_config.get()
  if not config:
    config = data_types.Config()

  previous_hash = self.request.get("previous_hash")
  if config.previous_hash and config.previous_hash != previous_hash:
    raise helpers.EarlyExitException(
        "Your change conflicts with another configuration update. "
        "Please refresh and try again.",
        500,
    )

  build_apiary_service_account_email = self.request.get(
      "build_apiary_service_account_email")
  build_apiary_service_account_private_key = self.request.get(
      "build_apiary_service_account_private_key")
  bug_report_url = self.request.get("bug_report_url")
  client_credentials = self.request.get("client_credentials")
  component_repository_mappings = self.request.get(
      "component_repository_mappings")
  contact_string = self.request.get("contact_string")
  documentation_url = self.request.get("documentation_url")
  github_credentials = self.request.get("github_credentials")
  platform_group_mappings = self.request.get("platform_group_mappings")
  privileged_users = self.request.get("privileged_users")
  relax_security_bug_restrictions = self.request.get(
      "relax_security_bug_restrictions")
  relax_testcase_restrictions = self.request.get(
      "relax_testcase_restrictions")
  reproduce_tool_client_id = self.request.get("reproduce_tool_client_id")
  reproduce_tool_client_secret = self.request.get(
      "reproduce_tool_client_secret")
  reproduction_help_url = self.request.get("reproduction_help_url")
  test_account_email = self.request.get("test_account_email")
  test_account_password = self.request.get("test_account_password")
  wifi_ssid = self.request.get("wifi_ssid")
  wifi_password = self.request.get("wifi_password")
  sendgrid_api_key = self.request.get("sendgrid_api_key")
  sendgrid_sender = self.request.get("sendgrid_sender")

  config.build_apiary_service_account_email = build_apiary_service_account_email
  config.build_apiary_service_account_private_key = (
      build_apiary_service_account_private_key)
  config.bug_report_url = bug_report_url
  config.client_credentials = client_credentials
  config.component_repository_mappings = component_repository_mappings
  config.contact_string = contact_string
  config.documentation_url = documentation_url
  config.github_credentials = github_credentials
  config.platform_group_mappings = platform_group_mappings
  config.privileged_users = privileged_users
  config.relax_security_bug_restrictions = bool(
      relax_security_bug_restrictions)
  config.relax_testcase_restrictions = bool(relax_testcase_restrictions)
  config.reproduce_tool_client_id = reproduce_tool_client_id
  config.reproduce_tool_client_secret = reproduce_tool_client_secret
  config.reproduction_help_url = reproduction_help_url
  config.test_account_email = test_account_email
  config.test_account_password = test_account_password
  config.wifi_ssid = wifi_ssid
  config.wifi_password = wifi_password
  config.sendgrid_api_key = sendgrid_api_key
  config.sendgrid_sender = sendgrid_sender

  helpers.log("Configuration", helpers.MODIFY_OPERATION)

  # Before hashing the entity, we must put it so that the internal maps are
  # updated.
  config.put()
  config.previous_hash = utils.entity_hash(config)
  config.put()

  template_values = {
      "title": "Success",
      "message": ("Configuration is successfully updated. "
                  "Redirecting to the configuration page..."),
      "redirect_url": "/configuration",
  }
  self.render("message.html", template_values)
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page."""
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())
  fuzzer_display = data_handler.get_fuzzer_display(testcase)

  formatted_reproduction_help = _format_reproduction_help(
      data_handler.get_formatted_reproduction_help(testcase))
  # When we have a HELP_TEMPLATE, ignore any default values set for HELP_URL.
  if not formatted_reproduction_help:
    reproduction_help_url = data_handler.get_reproduction_help_url(
        testcase, config)
  else:
    reproduction_help_url = None

  if not testcase.regression:
    regression = "Pending"
  elif testcase.regression == "NA":
    regression = "NA"
  else:
    regression = _get_revision_range_html_from_string(
        testcase.job_type, testcase.regression)

  fixed_full = None
  if "progression_pending" in metadata:
    fixed = "Pending"
  elif not testcase.fixed:
    fixed = "NO"
  elif testcase.fixed == "NA":
    fixed = "NA"
  elif testcase.fixed == "Yes":
    fixed = "YES"
  else:
    fixed = "YES"
    fixed_full = _get_revision_range_html_from_string(
        testcase.job_type, testcase.fixed)

  last_tested = None
  last_tested_revision = (metadata.get("last_tested_revision") or
                          testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(testcase.job_type,
                                           last_tested_revision)

  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       crash_revisions_dict)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)
  crash_stacktrace_preview_lines = _preview_stacktrace(crash_stacktrace)

  last_tested_crash_revision = metadata.get("last_tested_crash_revision")
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute="last_tested_crash_stacktrace")
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace,
      testcase.crash_type,
      last_tested_crash_revisions_dict,
  )
  last_tested_crash_stacktrace = convert_to_lines(
      last_tested_crash_stacktrace, crash_state_lines, crash_type)
  last_tested_crash_stacktrace_preview_lines = _preview_stacktrace(
      last_tested_crash_stacktrace)

  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  if "build_url" in metadata:
    metadata["build_url"] = metadata["build_url"].replace(
        "gs://", "https://storage.cloud.google.com/")

  pending_blame_task = (testcase.has_blame() and
                        "blame_pending" in metadata and
                        metadata["blame_pending"])
  pending_impact_task = (testcase.has_impacts() and
                         not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ("progression_pending" in metadata and
                              metadata["progression_pending"])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == "Pending"
  needs_refresh = testcase.status == "Pending" or (
      (testcase.status == "Processed" or testcase.status == "Duplicate") and
      (pending_blame_task or pending_impact_task or pending_minimize_task or
       pending_progression_task or pending_regression_task or
       pending_stack_task))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = severity_analyzer.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None
  if testcase.one_time_crasher_flag:
    last_crash_time = (crash_stats.get_last_crash_time(testcase) or
                       testcase.timestamp)

    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(":")[0]
  memory_tool_display_value = memory_tool_display_string.split(":")[1].strip()

  helpers.log("Testcase %s" % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      "id": testcase.key.id(),
      "crash_type": crash_type,
      "crash_address": crash_address,
      "crash_state": crash_state,  # Used by reproduce tool.
      "crash_state_lines": crash_state_lines,
      "crash_revision": testcase.crash_revision,
      "csrf_token": form.generate_csrf_token(),
      "external_user": external_user,
      "footer": testcase.comments,
      "formatted_reproduction_help": formatted_reproduction_help,
      "fixed": fixed,
      "fixed_full": fixed_full,
      "issue_url": issue_url,
      "is_admin": auth.is_current_user_admin(),
      "metadata": metadata,
      "minimized_testcase_size": minimized_testcase_size,
      "needs_refresh": needs_refresh,
      "original_testcase_size": original_testcase_size,
      "privileged_user": privileged_user,
      "regression": regression,
      "crash_stacktrace": {
          "lines": crash_stacktrace,
          "preview_lines": crash_stacktrace_preview_lines,
          "revision": revisions.get_real_revision(
              crash_revision, testcase.job_type, display=True),
      },
      "last_tested_crash_stacktrace": {
          "lines": last_tested_crash_stacktrace,
          "preview_lines": last_tested_crash_stacktrace_preview_lines,
          "revision": revisions.get_real_revision(
              last_tested_crash_revision, testcase.job_type, display=True),
      },
      "security_severity": security_severity,
      "security_severities": data_types.SecuritySeverity.list(),
      "stats": {
          "min_hour": crash_stats.get_min_hour(),
          "max_hour": crash_stats.get_max_hour(),
      },
      "suspected_cls": _parse_suspected_cls(metadata.get("predator_result")),
      "testcase": testcase,
      "timestamp": utils.utc_datetime_to_timestamp(testcase.timestamp),
      "show_blame": testcase.has_blame(),
      "show_impact": testcase.has_impacts(),
      "impacts_production": testcase.impacts_production(),
      "find_similar_issues_options": FIND_SIMILAR_ISSUES_OPTIONS,
      "auto_delete_timestamp": auto_delete_timestamp,
      "auto_close_timestamp": auto_close_timestamp,
      "memory_tool_display_label": memory_tool_display_label,
      "memory_tool_display_value": memory_tool_display_value,
      "last_tested": last_tested,
      "is_admin_or_not_oss_fuzz": is_admin_or_not_oss_fuzz(),
      "has_issue_tracker": has_issue_tracker,
      "reproduction_help_url": reproduction_help_url,
      "is_local_development":
          environment.is_running_on_app_engine_development(),
      "fuzzer_display": vars(fuzzer_display),
  }
def issue_url(self, issue_id):
  """Return the issue URL with the given ID."""
  config = db_config.get()
  url = config.jira_url + '/browse/' + str(issue_id)
  return url
def test_execute(self): """Tests executing of cron job.""" mock_storage = mock.MagicMock() mock_storage.buckets().insert().execute.return_value = 'timeCreated' self.mock.get_application_id_1.return_value = 'clusterfuzz-external' self.mock.get_application_id_2.return_value = 'clusterfuzz-external' self.mock.build.return_value = mock_storage pubsub_client = pubsub.PubSubClient() unmanaged_topic_name = pubsub.topic_name(app_identity.get_application_id(), 'jobs-linux') old_topic_name = pubsub.topic_name(app_identity.get_application_id(), 'jobs-shouldbedeleted') old_subscription_name = pubsub.subscription_name( app_identity.get_application_id(), 'jobs-shouldbedeleted') other_topic_name = pubsub.topic_name(app_identity.get_application_id(), 'other') pubsub_client.create_topic(unmanaged_topic_name) pubsub_client.create_topic(old_topic_name) pubsub_client.create_topic(other_topic_name) pubsub_client.create_subscription(old_subscription_name, old_topic_name) self.mock.get_projects.return_value = [ ('lib1', { 'homepage': 'http://example.com', 'primary_contact': '*****@*****.**', 'auto_ccs': [ '*****@*****.**', '*****@*****.**', ], }), ('lib2', { 'homepage': 'http://example2.com', 'disabled': True, 'fuzzing_engines': ['libfuzzer',], }), ('lib3', { 'homepage': 'http://example3.com', 'sanitizers': [ 'address', { 'memory': { 'experimental': True, }, }, 'undefined', ], 'auto_ccs': '*****@*****.**', 'disabled': False, 'fuzzing_engines': ['libfuzzer',], 'view_restrictions': 'none', }), ('lib4', { 'homepage': 'http://example4.com', 'sanitizers': ['address'], 'auto_ccs': '*****@*****.**', 'fuzzing_engines': ['none',], }), ('lib5', { 'homepage': 'http://example5.com', 'sanitizers': ['address'], 'fuzzing_engines': ['libfuzzer',], 'experimental': True, 'selective_unpack': True, }), ] mock_storage.buckets().get.side_effect = mock_bucket_get mock_storage.buckets().getIamPolicy.side_effect = mock_get_iam_policy mock_storage.buckets().setIamPolicy = CopyingMock() mock_storage.buckets().setIamPolicy.side_effect = mock_set_iam_policy self.app.get('/setup') job = data_types.Job.query( data_types.Job.name == 'libfuzzer_asan_lib1').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB1_LINUX') self.assertItemsEqual(job.templates, ['asan', 'libfuzzer']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib1/lib1-address-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib1-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib1-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib1-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib1-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = Proj-lib1,Engine-libfuzzer\n' 'PROJECT_NAME = lib1\n' 'SUMMARY_PREFIX = lib1\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib1/lib1-address-%s.srcmap.json\n' 'MANAGED = True\n') job = data_types.Job.query( data_types.Job.name == 'libfuzzer_asan_lib2').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB2_LINUX') self.assertItemsEqual(job.templates, ['asan', 'libfuzzer']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib2/lib2-address-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib2-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib2-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib2-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib2-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = 
Proj-lib2,Engine-libfuzzer\n' 'PROJECT_NAME = lib2\n' 'SUMMARY_PREFIX = lib2\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib2/lib2-address-%s.srcmap.json\n' 'MANAGED = True\n') job = data_types.Job.query( data_types.Job.name == 'libfuzzer_asan_lib3').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB3_LINUX') self.assertItemsEqual(job.templates, ['asan', 'libfuzzer']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib3/lib3-address-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib3-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib3-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib3-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib3-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = Proj-lib3,Engine-libfuzzer\n' 'PROJECT_NAME = lib3\n' 'SUMMARY_PREFIX = lib3\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-address-%s.srcmap.json\n' 'MANAGED = True\n' 'ISSUE_VIEW_RESTRICTIONS = none\n') job = data_types.Job.query( data_types.Job.name == 'libfuzzer_msan_lib3').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB3_LINUX') self.assertItemsEqual(job.templates, ['msan', 'libfuzzer']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib3/lib3-memory-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib3-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib3-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib3-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib3-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = Proj-lib3,Engine-libfuzzer\n' 'PROJECT_NAME = lib3\n' 'SUMMARY_PREFIX = lib3\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-memory-%s.srcmap.json\n' 'MANAGED = True\n' 'EXPERIMENTAL = True\n' 'ISSUE_VIEW_RESTRICTIONS = none\n') job = data_types.Job.query( data_types.Job.name == 'libfuzzer_ubsan_lib3').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB3_LINUX') self.assertItemsEqual(job.templates, ['ubsan', 'libfuzzer']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib3/lib3-undefined-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib3-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib3-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib3-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib3-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = Proj-lib3,Engine-libfuzzer\n' 'PROJECT_NAME = lib3\n' 'SUMMARY_PREFIX = lib3\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-undefined-%s.srcmap.json\n' 'MANAGED = True\n' 'ISSUE_VIEW_RESTRICTIONS = none\n') job = data_types.Job.query(data_types.Job.name == 'afl_asan_lib1').get() self.assertIsNotNone(job) self.assertEqual(job.platform, 'LIB1_LINUX') self.assertItemsEqual(job.templates, ['asan', 'afl']) self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds-afl/lib1/lib1-address-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib1-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib1-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib1-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib1-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = 
Proj-lib1,Engine-afl\n' 'PROJECT_NAME = lib1\n' 'SUMMARY_PREFIX = lib1\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds-afl/lib1/lib1-address-%s.srcmap.json\n' 'MANAGED = True\n' 'MINIMIZE_JOB_OVERRIDE = libfuzzer_asan_lib1\n') # Engine-less job. Manually managed. job = data_types.Job.query(data_types.Job.name == 'asan_lib4').get() self.assertIsNone(job) job = data_types.Job.query( data_types.Job.name == 'libfuzzer_asan_lib5').get() self.assertEqual(job.platform, 'LIB5_LINUX') self.assertEqual( job.environment_string, 'RELEASE_BUILD_BUCKET_PATH = ' 'gs://clusterfuzz-builds/lib5/lib5-address-([0-9]+).zip\n' 'FUZZ_LOGS_BUCKET = lib5-logs.clusterfuzz-external.appspot.com\n' 'CORPUS_BUCKET = lib5-corpus.clusterfuzz-external.appspot.com\n' 'QUARANTINE_BUCKET = lib5-quarantine.clusterfuzz-external.appspot.com\n' 'BACKUP_BUCKET = lib5-backup.clusterfuzz-external.appspot.com\n' 'AUTOMATIC_LABELS = Proj-lib5,Engine-libfuzzer\n' 'PROJECT_NAME = lib5\n' 'SUMMARY_PREFIX = lib5\n' 'REVISION_VARS_URL = https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib5/lib5-address-%s.srcmap.json\n' 'MANAGED = True\n' 'EXPERIMENTAL = True\n' 'UNPACK_ALL_FUZZ_TARGETS_AND_FILES = False\n') config = db_config.get() self.maxDiff = None # pylint: disable=invalid-name self.assertItemsEqual(config.revision_vars_url.splitlines(), [ u'libfuzzer_asan_lib2;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib2/lib2-address-%s.srcmap.json', u'libfuzzer_asan_lib3;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-address-%s.srcmap.json', u'libfuzzer_asan_lib1;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib1/lib1-address-%s.srcmap.json', u'libfuzzer_ubsan_lib1;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib1/lib1-undefined-%s.srcmap.json', u'libfuzzer_ubsan_lib2;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib2/lib2-undefined-%s.srcmap.json', u'afl_asan_lib1;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds-afl/lib1/lib1-address-%s.srcmap.json', u'blah;url2', u'libfuzzer_ubsan_lib3;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-undefined-%s.srcmap.json', u'libfuzzer_msan_lib3;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib3/lib3-memory-%s.srcmap.json', u'asan_lib4;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds-no-engine/lib4/lib4-address-%s.srcmap.json', u'libfuzzer_asan_lib5;https://commondatastorage.googleapis.com/' 'clusterfuzz-builds/lib5/lib5-address-%s.srcmap.json', ]) libfuzzer = data_types.Fuzzer.query( data_types.Fuzzer.name == 'libFuzzer').get() self.assertItemsEqual(libfuzzer.jobs, [ 'libfuzzer_asan_lib1', 'libfuzzer_asan_lib3', 'libfuzzer_asan_lib5', 'libfuzzer_msan_lib3', 'libfuzzer_ubsan_lib1', 'libfuzzer_ubsan_lib3', ]) afl = data_types.Fuzzer.query(data_types.Fuzzer.name == 'afl').get() self.assertItemsEqual(afl.jobs, [ 'afl_asan_lib1', ]) # Test that old unused jobs are deleted. self.assertIsNone( data_types.Job.query( data_types.Job.name == 'libfuzzer_asan_old_job').get()) self.assertIsNone( data_types.Job.query( data_types.Job.name == 'libfuzzer_msan_old_job').get()) # Unmanaged job should still exist. self.assertIsNotNone( data_types.Job.query(data_types.Job.name == 'unmanaged_job').get()) # Test that project settings are created. 
lib1_settings = ndb.Key(data_types.OssFuzzProject, 'lib1').get() self.assertIsNotNone(lib1_settings) self.assertDictEqual({ 'cpu_weight': 1.5, 'name': 'lib1', 'disk_size_gb': None, 'service_account': '*****@*****.**', 'high_end': False, 'ccs': [ '*****@*****.**', '*****@*****.**', '*****@*****.**' ], }, lib1_settings.to_dict()) lib2_settings = ndb.Key(data_types.OssFuzzProject, 'lib2').get() self.assertIsNone(lib2_settings) lib3_settings = ndb.Key(data_types.OssFuzzProject, 'lib3').get() self.assertIsNotNone(lib3_settings) self.assertDictEqual({ 'cpu_weight': 1.0, 'name': 'lib3', 'disk_size_gb': None, 'service_account': '*****@*****.**', 'high_end': False, 'ccs': ['*****@*****.**'], }, lib3_settings.to_dict()) lib4_settings = ndb.Key(data_types.OssFuzzProject, 'lib4').get() self.assertIsNotNone(lib4_settings) self.assertDictEqual({ 'cpu_weight': 1.0, 'name': 'lib4', 'disk_size_gb': None, 'service_account': '*****@*****.**', 'high_end': True, 'ccs': ['*****@*****.**'], }, lib4_settings.to_dict()) old_lib_settings = ndb.Key(data_types.OssFuzzProject, 'old_lib').get() self.assertIsNone(old_lib_settings) mock_storage.buckets().get.assert_has_calls([ mock.call(bucket='lib1-backup.clusterfuzz-external.appspot.com'), mock.call(bucket='lib1-corpus.clusterfuzz-external.appspot.com'), mock.call(bucket='lib1-quarantine.clusterfuzz-external.appspot.com'), mock.call(bucket='lib1-logs.clusterfuzz-external.appspot.com'), mock.call(bucket='lib2-backup.clusterfuzz-external.appspot.com'), mock.call(bucket='lib2-corpus.clusterfuzz-external.appspot.com'), mock.call(bucket='lib2-quarantine.clusterfuzz-external.appspot.com'), mock.call(bucket='lib2-logs.clusterfuzz-external.appspot.com'), mock.call(bucket='lib3-backup.clusterfuzz-external.appspot.com'), mock.call(bucket='lib3-corpus.clusterfuzz-external.appspot.com'), mock.call(bucket='lib3-quarantine.clusterfuzz-external.appspot.com'), mock.call(bucket='lib3-logs.clusterfuzz-external.appspot.com'), ]) mock_storage.buckets().insert.assert_has_calls([ mock.call( body={ 'name': 'lib1-backup.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 100 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={'name': 'lib1-corpus.clusterfuzz-external.appspot.com'}, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib1-quarantine.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 90 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib2-backup.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 100 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={'name': 'lib2-corpus.clusterfuzz-external.appspot.com'}, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib2-quarantine.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 90 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib2-logs.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 14 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib3-backup.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 100 
} }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={'name': 'lib3-corpus.clusterfuzz-external.appspot.com'}, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib3-quarantine.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 90 } }] } }, project='clusterfuzz-external'), mock.call().execute(), mock.call( body={ 'name': 'lib3-logs.clusterfuzz-external.appspot.com', 'lifecycle': { 'rule': [{ 'action': { 'type': 'Delete' }, 'condition': { 'age': 14 } }] } }, project='clusterfuzz-external'), mock.call().execute(), ]) mock_storage.buckets().setIamPolicy.assert_has_calls([ mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-backup.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-backup.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }] }, bucket='lib1-backup.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }, { 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib1-backup.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }] }, bucket='lib1-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }, { 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib1-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }] }, bucket='lib1-logs.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }] }, bucket='lib1-logs.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 
'user:[email protected]', 'user:[email protected]' ] }, { 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib1-logs.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-quarantine.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['user:[email protected]'] }] }, bucket='lib1-quarantine.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }] }, bucket='lib1-quarantine.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': [ 'user:[email protected]', 'user:[email protected]' ] }, { 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib1-quarantine.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['serviceAccount:[email protected]'] }] }, bucket='clusterfuzz-external-deployment'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['serviceAccount:[email protected]'] }] }, bucket='test-shared-corpus-bucket'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['serviceAccount:[email protected]'] }] }, bucket='test-mutator-plugins-bucket'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['serviceAccount:[email protected]'] }] }, bucket=u'global-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib2-backup.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib2-corpus.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib2-logs.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectAdmin', 'members': ['serviceAccount:[email protected]'] }] }, bucket='lib2-quarantine.clusterfuzz-external.appspot.com'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 'roles/storage.objectViewer', 'members': ['serviceAccount:[email protected]'] }] }, bucket='clusterfuzz-external-deployment'), mock.call( body={ 'resourceId': 'fake', 'kind': 'storage#policy', 'etag': 'fake', 'bindings': [{ 'role': 
            'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='test-shared-corpus-bucket'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='test-mutator-plugins-bucket'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket=u'global-corpus.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }]
    },
    bucket='lib3-backup.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }, {
            'role': 'roles/storage.objectAdmin',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='lib3-backup.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }]
    },
    bucket='lib3-corpus.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }, {
            'role': 'roles/storage.objectAdmin',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='lib3-corpus.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }, {
            'role': 'roles/storage.objectAdmin',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='lib3-logs.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }]
    },
    bucket='lib3-quarantine.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['user:[email protected]']
        }, {
            'role': 'roles/storage.objectAdmin',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='lib3-quarantine.clusterfuzz-external.appspot.com'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='clusterfuzz-external-deployment'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='test-shared-corpus-bucket'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket='test-mutator-plugins-bucket'),
mock.call(
    body={
        'resourceId': 'fake',
        'kind': 'storage#policy',
        'etag': 'fake',
        'bindings': [{
            'role': 'roles/storage.objectViewer',
            'members': ['serviceAccount:[email protected]']
        }]
    },
    bucket=u'global-corpus.clusterfuzz-external.appspot.com')
])

mappings = data_types.FuzzerJob.query()
tags_fuzzers_and_jobs = [(m.platform, m.fuzzer, m.job) for m in mappings]
self.assertItemsEqual(tags_fuzzers_and_jobs, [
    ('LIB1_LINUX', 'afl', 'afl_asan_lib1'),
    ('LIB1_LINUX', 'libFuzzer', 'libfuzzer_asan_lib1'),
    ('LIB3_LINUX', 'libFuzzer', 'libfuzzer_asan_lib3'),
    ('LIB3_LINUX', 'libFuzzer', 'libfuzzer_msan_lib3'),
    ('LIB1_LINUX', 'libFuzzer', 'libfuzzer_ubsan_lib1'),
    ('LIB3_LINUX', 'libFuzzer', 'libfuzzer_ubsan_lib3'),
    ('LIB5_LINUX', 'libFuzzer', 'libfuzzer_asan_lib5'),
])

all_permissions = [
    entity.to_dict()
    for entity in data_types.ExternalUserPermission.query()
]
self.assertItemsEqual(all_permissions, [{
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_ubsan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_ubsan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_ubsan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'afl_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'afl_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'afl_asan_lib1',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_msan_lib3',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_ubsan_lib3',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'libfuzzer_asan_lib3',
    'email': u'*****@*****.**'
}, {
    'entity_kind': 1L,
    'is_prefix': False,
    'auto_cc': 1L,
    'entity_name': u'asan_lib4',
    'email': u'*****@*****.**'
}])
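For context, the calls asserted above match the shape of the Cloud Storage JSON API's buckets.setIamPolicy method. A minimal sketch of what the production-side call could look like, assuming a google-api-python-client storage service object; the function name and wiring here are illustrative, not the code under test:

def grant_viewer(storage, bucket_name, member):
    # Fetch the current IAM policy, append a read-only binding, and write it
    # back. `storage` is assumed to come from
    # googleapiclient.discovery.build('storage', 'v1').
    policy = storage.buckets().getIamPolicy(bucket=bucket_name).execute()
    policy.setdefault('bindings', []).append({
        'role': 'roles/storage.objectViewer',
        'members': [member],
    })
    return storage.buckets().setIamPolicy(
        bucket=bucket_name, body=policy).execute()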
def post(self):
    """Handle a post request."""
    config = db_config.get()
    if not config:
        config = data_types.Config()

    previous_hash = self.request.get('previous_hash')
    if config.previous_hash and config.previous_hash != previous_hash:
        raise helpers.EarlyExitException(
            'Your change conflicts with another configuration update. '
            'Please refresh and try again.', 500)

    build_apiary_service_account_private_key = self.request.get(
        'build_apiary_service_account_private_key')
    bug_report_url = self.request.get('bug_report_url')
    client_credentials = self.request.get('client_credentials')
    jira_url = self.request.get('jira_url')
    jira_credentials = self.request.get('jira_credentials')
    component_repository_mappings = self.request.get(
        'component_repository_mappings')
    contact_string = self.request.get('contact_string')
    documentation_url = self.request.get('documentation_url')
    github_credentials = self.request.get('github_credentials')
    platform_group_mappings = self.request.get('platform_group_mappings')
    privileged_users = self.request.get('privileged_users')
    blacklisted_users = self.request.get('blacklisted_users')
    relax_security_bug_restrictions = self.request.get(
        'relax_security_bug_restrictions')
    relax_testcase_restrictions = self.request.get(
        'relax_testcase_restrictions')
    reproduce_tool_client_id = self.request.get('reproduce_tool_client_id')
    reproduce_tool_client_secret = self.request.get(
        'reproduce_tool_client_secret')
    reproduction_help_url = self.request.get('reproduction_help_url')
    test_account_email = self.request.get('test_account_email')
    test_account_password = self.request.get('test_account_password')
    wifi_ssid = self.request.get('wifi_ssid')
    wifi_password = self.request.get('wifi_password')
    sendgrid_api_key = self.request.get('sendgrid_api_key')
    sendgrid_sender = self.request.get('sendgrid_sender')

    config.build_apiary_service_account_private_key = (
        build_apiary_service_account_private_key)
    config.bug_report_url = bug_report_url
    config.client_credentials = client_credentials
    config.component_repository_mappings = component_repository_mappings
    config.contact_string = contact_string
    config.documentation_url = documentation_url
    config.github_credentials = github_credentials
    config.jira_credentials = jira_credentials
    config.jira_url = jira_url
    config.platform_group_mappings = platform_group_mappings
    config.privileged_users = privileged_users
    config.blacklisted_users = blacklisted_users
    config.relax_security_bug_restrictions = bool(
        relax_security_bug_restrictions)
    config.relax_testcase_restrictions = bool(relax_testcase_restrictions)
    config.reproduce_tool_client_id = reproduce_tool_client_id
    config.reproduce_tool_client_secret = reproduce_tool_client_secret
    config.reproduction_help_url = reproduction_help_url
    config.test_account_email = test_account_email
    config.test_account_password = test_account_password
    config.wifi_ssid = wifi_ssid
    config.wifi_password = wifi_password
    config.sendgrid_api_key = sendgrid_api_key
    config.sendgrid_sender = sendgrid_sender

    helpers.log('Configuration', helpers.MODIFY_OPERATION)

    # Before hashing the entity, we must put it so that the internal maps are
    # updated.
    config.put()
    config.previous_hash = utils.entity_hash(config)
    config.put()

    template_values = {
        'title': 'Success',
        'message': ('Configuration is successfully updated. '
                    'Redirecting to the configuration page...'),
        'redirect_url': '/configuration',
    }
    self.render('message.html', template_values)
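The previous_hash round-trip above is an optimistic-concurrency check: the form submits the hash the user last saw, and a mismatch means another update landed in between. A standalone sketch of the same pattern; entity_hash and the dict-based config here are stand-ins, not the real helpers:

import hashlib

def entity_hash(fields):
    # Hash a stable serialization of the entity's fields (stand-in for
    # utils.entity_hash).
    return hashlib.sha1(repr(sorted(fields.items())).encode()).hexdigest()

def save_config(config, submitted_hash):
    # Reject the write if the stored hash no longer matches what the
    # submitter last saw.
    if config.get('previous_hash') and config['previous_hash'] != submitted_hash:
        raise RuntimeError('Conflicting configuration update; refresh and retry.')
    config['previous_hash'] = entity_hash(
        {k: v for k, v in config.items() if k != 'previous_hash'})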
async def create_db_pool():
    # Build a connection pool from the db_config mapping; the host falls back
    # to localhost when unset. create_pool is expected to come from the async
    # driver in use (e.g. aiopg or aiomysql).
    db_host = db_config.get('host', '127.0.0.1')
    database = db_config.get('database')
    db_user = db_config.get('user')
    db_pool = await create_pool(host=db_host, database=database, user=db_user)
    return db_pool
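A usage sketch for the factory above, assuming create_pool is aiopg's and db_config is a plain dict; both are assumptions, since the snippet does not show its imports:

import asyncio
from aiopg import create_pool  # assumption: the pool factory in use

db_config = {'host': '127.0.0.1', 'database': 'test_db', 'user': 'dbuser'}

async def main():
    pool = await create_db_pool()
    # Check out a connection and run a trivial query.
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute('SELECT 1')
            print(await cur.fetchone())

asyncio.run(main())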
def get_issue_description(testcase,
                          reporter=None,
                          show_reporter=False,
                          hide_crash_state=False):
    """Returns testcase as string."""
    # Get issue tracker configuration parameters.
    config = db_config.get()
    domain = get_domain()
    testcase_id = testcase.key.id()
    fuzzer_name = testcase.actual_fuzzer_name()

    download_url = TESTCASE_DOWNLOAD_URL.format(
        domain=domain, testcase_id=testcase_id)
    report_url = TESTCASE_REPORT_URL.format(
        domain=domain, testcase_id=testcase_id)
    regressed_revision_range_url = TESTCASE_REVISION_RANGE_URL.format(
        domain=domain,
        job_type=testcase.job_type,
        revision_range=testcase.regression)
    fixed_revision_range_url = TESTCASE_REVISION_RANGE_URL.format(
        domain=domain,
        job_type=testcase.job_type,
        revision_range=testcase.fixed)

    if testcase.status == 'Unreproducible':
        return ('Testcase {testcase_id} failed to reproduce the crash. '
                'Please inspect the program output at {report_url}.'.format(
                    testcase_id=testcase_id, report_url=report_url))

    # Now create the content string.
    content_string = 'Detailed report: %s\n\n' % report_url

    project_name = get_project_name(testcase.job_type)
    if project_name and project_name != utils.default_project_name():
        content_string += 'Project: %s\n' % project_name

    if fuzzer_name:
        content_string += 'Fuzzer: %s\n' % fuzzer_name
        binary_name = testcase.get_metadata('fuzzer_binary_name')
        if binary_name:
            content_string += 'Fuzz target binary: %s\n' % binary_name

    content_string += 'Job Type: %s\n' % testcase.job_type

    # Add platform id if other than default ones. Only applicable to Android.
    # e.g. android:shamu_asan
    if testcase.platform_id:
        content_string += 'Platform Id: %s\n\n' % testcase.platform_id

    content_string += 'Crash Type: %s\n' % get_crash_type_string(testcase)
    content_string += 'Crash Address: %s\n' % testcase.crash_address

    if hide_crash_state:
        crash_state = '...see report...'
    else:
        crash_state = testcase.crash_state
    content_string += 'Crash State:\n%s\n' % (
        utils.indent_string(crash_state + '\n', 2))

    content_string += '%s\n\n' % environment.get_memory_tool_display_string(
        testcase.job_type)

    if data_types.SecuritySeverity.is_valid(testcase.security_severity):
        content_string += (
            'Recommended Security Severity: %s\n\n' %
            severity_analyzer.severity_to_string(testcase.security_severity))

    if (testcase.regression and testcase.regression != 'NA' and
            not testcase.regression.startswith('0:') and
            not testcase.regression.endswith('!')):
        content_string += 'Regressed: %s\n' % regressed_revision_range_url

    if (testcase.fixed and testcase.fixed != 'NA' and
            testcase.fixed != 'Yes' and not testcase.fixed.endswith('!')):
        content_string += 'Fixed: %s\n' % fixed_revision_range_url

    if not content_string.endswith('\n\n'):
        content_string += '\n'

    content_string += 'Reproducer Testcase: %s\n\n' % download_url

    second_crash_stacktrace = get_stacktrace(
        testcase, stack_attribute='second_crash_stacktrace')
    if testcase.one_time_crasher_flag and second_crash_stacktrace:
        content_string += second_crash_stacktrace.split('\n')[0] + '\n\n'

    if testcase.gestures:
        content_string += 'Additional requirements: Requires Gestures\n\n'

    if testcase.http_flag:
        content_string += 'Additional requirements: Requires HTTP\n\n'

    if show_reporter:
        if reporter:
            content_string += (
                'Issue manually filed by: %s\n\n' % reporter.split('@')[0])
        else:
            content_string += 'Issue filed automatically.\n\n'

    # Jobs can override the help url.
    content_string += 'See %s for instructions to reproduce this bug locally.' % (
        get_reproduction_help_url(testcase, config))

    # Unreproducible crash text is only applicable when we are consistently
    # seeing it happening, and hence the reason for auto-filing it. Otherwise,
    # someone filed it manually, so skip the text in that case.
    if not reporter and testcase.one_time_crasher_flag:
        content_string += '\n\n' + FILE_UNREPRODUCIBLE_TESTCASE_TEXT

    return content_string
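The function above depends on module-level URL templates and a FILE_UNREPRODUCIBLE_TESTCASE_TEXT constant that the snippet does not include. Illustrative stand-ins, purely to make the snippet self-contained; the real module defines its own values:

# Illustrative stand-ins only; not the project's actual templates.
TESTCASE_DOWNLOAD_URL = 'https://{domain}/download?testcase_id={testcase_id}'
TESTCASE_REPORT_URL = 'https://{domain}/testcase-detail/{testcase_id}'
TESTCASE_REVISION_RANGE_URL = (
    'https://{domain}/revisions?job={job_type}&range={revision_range}')
FILE_UNREPRODUCIBLE_TESTCASE_TEXT = (
    'Note: this crash may not reproduce reliably with the attached testcase.')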
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.engine.url import URL

db = SQLAlchemy()
Base = db.Model

# Use config module if provided:
# db_config = {
#     'user': 'dbuser',
#     'password': 'dbpassword',
#     'host': 'localhost',
#     'port': '3306',
#     'name': 'test_db',
# }
try:
    from config import db_config

    db_url = URL(
        drivername='mysql+mysqlconnector',
        username=db_config.get('user'),
        password=db_config.get('password'),
        host=db_config.get('host'),
        port=db_config.get('port'),
        database=db_config.get('name'))
except ImportError:
    db_url = 'mysql+mysqlconnector://ispyb:[email protected]:3306/ispyb'


def init_app(app):
    """Initialise the database connection and flask-sqlalchemy."""
    print("Using database connection URL: {}".format(db_url))
    app.config['SQLALCHEMY_DATABASE_URI'] = db_url
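A minimal wiring sketch for the module above; the app setup is illustrative, while db.init_app is the standard flask-sqlalchemy binding step:

from flask import Flask

app = Flask(__name__)
init_app(app)     # sets SQLALCHEMY_DATABASE_URI from db_url
db.init_app(app)  # bind the SQLAlchemy instance to this app

with app.app_context():
    db.create_all()  # create tables for any models derived from Base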