class CacheTestClass(object):
  """Test cache class.

  Exercises memoize.wrap on instance methods: `func` returns a cacheable
  value, while `none` returns None implicitly. `self.called` records every
  real (non-cached) invocation — presumably cache hits skip the body and do
  not append; tests can use it to count actual executions.
  """

  def __init__(self):
    # Log of arguments seen by real invocations of the methods below.
    self.called = []

  @memoize.wrap(memoize.FifoInMemory(10))
  def func(self, a, b=2):
    self.called.append((a, b))
    return a + b

  @memoize.wrap(memoize.FifoInMemory(10))
  def none(self, a):
    # Implicitly returns None; lets tests check caching of falsy results.
    self.called.append(a)
def get(self):
  """Handle a cron job: CC relevant users on open, filed testcase bugs.

  For every open testcase with an associated bug, compute the users that
  should be CC'd for its job, add the missing ones to the issue, and — when
  the issue has not yet been marked as reported — apply the policy's
  'reported' label and, for restricted-view issues, a deadline comment.
  """

  @memoize.wrap(memoize.FifoInMemory(256))
  def cc_users_for_job(job_type, security_flag):
    """Return users to CC for a job."""
    # Memoized per cron run.
    return external_users.cc_users_for_job(job_type, security_flag)

  for testcase in get_open_testcases_with_bugs():
    issue_tracker = issue_tracker_utils.get_issue_tracker_for_testcase(
        testcase)
    if not issue_tracker:
      logging.error('Failed to get issue tracker manager for %s',
                    testcase.key.id())
      continue

    policy = issue_tracker_policy.get(issue_tracker.project)
    reported_label = policy.label('reported')
    if not reported_label:
      # NOTE(review): this aborts the entire cron run, not just this
      # testcase — presumably all testcases share one policy; confirm.
      return

    reported_pattern = issue_filer.get_label_pattern(reported_label)

    try:
      issue = issue_tracker.get_original_issue(testcase.bug_information)
    except Exception:
      # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
      # still propagate; issue fetches remain best-effort per testcase.
      logging.error('Error occurred when fetching issue %s.',
                    testcase.bug_information)
      continue

    if not issue or not issue.is_open:
      continue

    ccs = cc_users_for_job(testcase.job_type, testcase.security_flag)
    new_ccs = [cc for cc in ccs if cc not in issue.ccs]
    if not new_ccs:
      # Nothing to do.
      continue

    for cc in new_ccs:
      logging.info('CCing %s on %s', cc, issue.id)
      issue.ccs.add(cc)

    comment = None
    if not issue.labels.has_with_pattern(reported_pattern):
      # Add reported label and deadline comment if necessary.
      for result in issue_filer.apply_substitutions(policy, reported_label,
                                                    testcase):
        issue.labels.add(result)

      if policy.label('restrict_view') in issue.labels:
        logging.info('Adding deadline comment on %s', issue.id)
        comment = policy.deadline_policy_message

    issue.save(new_comment=comment, notify=True)
def gen_func():
  """Generate function with memoization.

  Clears the module-level CALLED list, then returns a fresh memoized
  function (10-entry FIFO cache) that records each real (a, b) invocation
  in CALLED and returns a + b.
  """
  # Reset the invocation log in place so existing references stay valid.
  del CALLED[:]

  @memoize.wrap(memoize.FifoInMemory(10))
  def func(a, b=2):
    CALLED.append((a, b))
    return a + b

  return func
def __init__(self, root=None, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg
  """Initialize the config accessor.

  Args:
    root: Optional config-root format string; any extra positional and
        keyword arguments are substituted into it via str.format.

  Raises:
    errors.BadConfigError: if the config directory is unset/missing, or
        the resulting root fails validation against it.
  """
  if root is None:
    self._root = None
  else:
    self._root = root.format(*args, **kwargs)

  self._config_dir = environment.get_config_directory()
  self._cache = memoize.FifoInMemory(CACHE_SIZE)

  # The config directory itself must be set and exist on disk.
  if not (self._config_dir and os.path.exists(self._config_dir)):
    raise errors.BadConfigError(self._config_dir)

  # The requested root must exist under the config directory.
  if not _validate_root(self._config_dir, self._root):
    raise errors.BadConfigError(self._config_dir)
def get(self):
  """Handle a cron job: CC relevant users on open, filed testcase bugs.

  For every open testcase with an associated bug, compute the users that
  should be CC'd for its job, add the missing ones to the issue, and — when
  the issue has no 'reported-' label yet — apply the reported label and,
  for restricted issues, a deadline comment.
  """

  @memoize.wrap(memoize.FifoInMemory(256))
  def cc_users_for_job(job_type, security_flag):
    """Return users to CC for a job."""
    # Memoized per cron run.
    return external_users.cc_users_for_job(job_type, security_flag)

  for testcase in get_open_testcases_with_bugs():
    issue_tracker = issue_tracker_utils.get_issue_tracker_for_testcase(
        testcase, use_cache=True)
    if not issue_tracker:
      logging.error('Failed to get issue tracker manager for %s',
                    testcase.key.id())
      continue

    try:
      issue = issue_tracker.get_original_issue(testcase.bug_information)
    except Exception:
      # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
      # still propagate; issue fetches remain best-effort per testcase.
      logging.error('Error occurred when fetching issue %s.',
                    testcase.bug_information)
      continue

    if not issue or not issue.is_open:
      continue

    ccs = cc_users_for_job(testcase.job_type, testcase.security_flag)
    new_ccs = [cc for cc in ccs if cc not in issue.ccs]
    if not new_ccs:
      # Nothing to do.
      continue

    for cc in new_ccs:
      logging.info('CCing %s on %s', cc, issue.id)
      issue.ccs.add(cc)

    comment = None
    if not issue.labels.has_with_prefix('reported-'):
      # Add reported label and deadline comment if necessary.
      issue.labels.add(issue_filer.reported_label())
      if 'Restrict-View-Commit' in issue.labels:
        logging.info('Adding deadline comment on %s', issue.id)
        comment = issue_filer.DEADLINE_NOTE

    issue.save(new_comment=comment, notify=True)
class CoverageFieldContext(BuiltinFieldContext):
  """Coverage field context. Acts as a cache."""

  def __init__(self, fuzzer=None, jobs=None):
    super(CoverageFieldContext, self).__init__(fuzzer=fuzzer, jobs=jobs)

  @memoize.wrap(memoize.FifoInMemory(256))
  def get_coverage_info(self, fuzzer, date=None):
    """Return coverage info of child fuzzers."""
    if fuzzer in data_types.BUILTIN_FUZZERS:
      # Built-in fuzzers report coverage per project rather than per
      # fuzzer, so resolve the project from the (single) selected job.
      selected_job = self.single_job_or_none()
      project_name = data_handler.get_project_name(selected_job)
      return get_coverage_info(project_name, date)

    # Child fuzz targets are looked up by their project-qualified name;
    # fall back to the raw fuzzer name when no target record exists.
    target = data_handler.get_fuzz_target(fuzzer)
    lookup_name = target.project_qualified_name() if target else fuzzer
    return get_coverage_info(lookup_name, date)
class FuzzerRunLogsContext(BuiltinFieldContext):
  """Fuzzer logs context."""

  # Memcache entries for fuzzer->bucket lookups live for 15 minutes.
  MEMCACHE_TTL = 15 * 60

  def __init__(self, fuzzer=None, jobs=None):
    super(FuzzerRunLogsContext, self).__init__(fuzzer=fuzzer, jobs=jobs)

  @memoize.wrap(memoize.FifoInMemory(256))
  def _get_logs_bucket_from_job(self, job_type):
    """Get logs bucket from job."""
    return data_handler.get_value_from_job_definition_or_environment(
        job_type, 'FUZZ_LOGS_BUCKET')

  @memoize.wrap(memoize.Memcache(MEMCACHE_TTL, key_fn=_logs_bucket_key_fn))
  def _get_logs_bucket_from_fuzzer(self, fuzzer_name):
    """Get logs bucket from fuzzer (child fuzzers only)."""
    mappings = fuzz_target_utils.get_fuzz_target_jobs(
        fuzz_target_name=fuzzer_name)
    jobs = [mapping.job for mapping in mappings]
    if not jobs:
      return None

    # Only usable if every job mapped to this fuzzer agrees on the bucket.
    first_bucket = self._get_logs_bucket_from_job(jobs[0])
    for job in jobs[1:]:
      if self._get_logs_bucket_from_job(job) != first_bucket:
        return None

    return first_bucket

  def get_logs_bucket(self, fuzzer_name=None, job_type=None):
    """Return logs bucket for the job."""
    # Job-level configuration wins over fuzzer-level lookup.
    if job_type:
      return self._get_logs_bucket_from_job(job_type)

    if fuzzer_name:
      return self._get_logs_bucket_from_fuzzer(fuzzer_name)

    return None
      # NOTE(review): this fragment continues a write-with-retry loop whose
      # `try:` / `for` header lies above this chunk — indentation below is a
      # reconstruction; confirm against the full file.
      file_handle = open(file_path, file_mode)
      file_handle.write(content_string)
      file_handle.close()
    except:
      # Best-effort retry: wait a random interval, then try again.
      logs.log_warn('Error occurred while writing %s, retrying.' % file_path)
      time.sleep(random.uniform(1, failure_wait_interval))
      continue

    # Successfully written data file.
    return

  # All retries exhausted.
  logs.log_error('Failed to write data to file %s.' % file_path)


@memoize.wrap(memoize.FifoInMemory(1))
def default_project_name():
  """Return the default project name for this instance of ClusterFuzz."""
  # Do not use |PROJECT_NAME| environment variable as that is the overridden
  # project name from job type and is not the default project name.
  return local_config.ProjectConfig().get('env.PROJECT_NAME')


def current_project():
  """Return the project for the current job, or the default project."""
  return environment.get_value('PROJECT_NAME', default_project_name())


def current_source_version():
  """Return the current source revision."""
  # For test use.
def setUp(self):
  """Create a fresh 5-entry FIFO cache for each test."""
  self.cache = memoize.FifoInMemory(5)