def tearDown(self):
    """Clean up per-test state: upload dir, this user's submissions, then base teardown."""
    # Wipe the upload scratch directory if the test created it.
    if isdir(UPLOAD_FOLDER):
        rmtree(UPLOAD_FOLDER)
    # Delete every submission left behind by this test user.
    for leftover in Submission.objects(submitter=self.username):
        leftover.delete()
    # Let the shared user fixture finish its own cleanup.
    UserTestCase.tearDown(self)
def hash_submission(submission_id):
    """
    Hash the archive attached to a submission, creating Hash entries.

    Loads the Submission by id, runs the group-specific hashing command
    over its source archive, and turns each line of JSON output into a
    SUBMITTED Hash entry. The first entry is stored on the submission
    itself; additional entries get deep-copied submissions of their own.
    On full success the source file is removed. All failure modes are
    recorded as comments on the submission rather than raised.

    :param submission_id: primary key of the Submission to process.
    """
    submission = Submission.objects(id=submission_id).first()
    if not submission:
        config.LOGGER.debug('Submission %s not found.' % (submission_id))
        return
    if submission.entry is not None:
        # Fixed typo in the recorded comment ("alread exits").
        submission.add_comment('Entry already exists. Skipping hashing.')
        return
    if not isfile(submission.source):
        submission.add_comment('Source file not found.')
        return
    if submission.group not in config.HASHING_COMMANDS:
        submission.add_comment('Hashing command for this group not found.')
        return
    command = config.HASHING_COMMANDS[submission.group].format(
        archive=submission.source)
    try:
        # shell=True is required: the configured command is a shell string.
        # NOTE(review): command is built from config + stored source path;
        # confirm upstream sanitizes submission.source before trusting this.
        output = check_output(command, shell=True).strip()
        count = 0
        for line in output.split('\n'):
            json_data = loads(line)
            json_data['cves'] = submission.cves

            # Normalize metadata to a list-of-dicts shape.
            meta = json_data.get('metadata', [])
            if isinstance(meta, dict):
                meta = [meta]
            json_data['metadata'] = meta

            entry = Hash()
            entry.mongify(json_data)
            entry.status = 'SUBMITTED'
            entry.submitter = submission.submitter
            if count > 0:
                # create a new submission for each embedded entry
                s = deepcopy(submission)
                s.id = None
            else:
                s = submission
            s.entry = entry
            s.approval = 'PENDING_APPROVAL'
            s.validate()
            s.save()
            s.add_comment('Auto hash entry added')
            count += 1
        # we are done safely, now remove the source
        submission.remove_source_file()
    except CalledProcessError as e:
        # Record the failure on the submission; str(e) is safer than the
        # raw exception object for text storage.
        submission.add_comment(str(e))
        config.LOGGER.debug('Command execution failed for "%s"' % (command))
    except Exception as e:
        submission.add_comment(str(e))
        # str(e) instead of e.message: .message is deprecated and not
        # present on every exception class.
        config.LOGGER.warn('Failed to hash: ' + str(e))
def update_front_page_stats():
    """
    Recompute the cached per-group hash/submission counts for the front page.

    Builds a count of released hashes, requested submissions, and pending
    submissions for each submission group (plus the 'all' pseudo-group)
    and stores the result on _CONFIG.front_page_stats.
    """
    stats = {}
    # Only fetch the 'group' field — counts are all we need.
    stats['hashes'] = Hash.objects(status='RELEASED').only('group')
    stats['submitted'] = Submission.objects(
        approval='REQUESTED').only('group')
    stats['pending'] = Submission.objects(
        approval='PENDING_APPROVAL').only('group')
    # Generate counts for objects and for each format
    # data will contain hashes, hashes_jars, hashes_eggs etc.
    # sorted() instead of keys()+sort(): same result, works on any mapping
    # view (and on Python 3, where keys() has no .sort()).
    groups = sorted(SUBMISSION_GROUPS.keys())
    data = {'groups': groups, 'stats': {}}
    for group in groups:
        stat = {}
        for key in stats:
            if group == 'all':
                # 'all' counts the whole queryset, unfiltered.
                stat[key] = len(stats[key])
            else:
                stat[key] = len(stats[key].filter(group=group))
        data['stats'][group] = stat
    _CONFIG.front_page_stats = data
def submit(submitter, source, group=None, filename=None, suffix=None,
           cves=None, metadata=None, entry=None, approval='REQUESTED'):
    """
    Create, validate and save a new Submission, then kick off hashing.

    :param submitter: username recorded as the submitter.
    :param source: path/location of the source archive.
    :param group: submission group, if known.
    :param filename: original filename, if known.
    :param suffix: file suffix; title-cased into submission.format when set.
    :param cves: optional list of CVE ids to attach.
    :param metadata: optional metadata dict to attach.
    :param entry: optional pre-built Hash entry; its CVEs are merged in.
    :param approval: initial approval state (default 'REQUESTED').
    """
    config.LOGGER.info('Submitting: %s' % (
        ', '.join(['%s:%s' % (k, v) for (k, v) in locals().items()])))
    submission = Submission()
    submission.source = source
    submission.group = group
    submission.filename = filename
    if suffix:
        submission.format = suffix.title()
    # None-defaults instead of mutable defaults ([]/{}): the old defaults
    # were shared across calls and got mutated by the append below.
    # Copy the caller's list so we never mutate their argument either.
    submission.cves = list(cves) if cves else []
    if entry and entry.cves:
        for cve in entry.cves:
            # BUG FIX: the original tested `cve not in entry.cves`, which is
            # always False (membership in the list being iterated), so entry
            # CVEs were never merged. Test against the submission's list.
            if cve not in submission.cves:
                submission.cves.append(cve)
    submission.metadata = metadata if metadata is not None else {}
    submission.submitter = submitter
    if entry:
        submission.entry = entry
    submission.approval = approval
    submission.validate()
    submission.save()
    set_hash(submission)
    # ensure index stats are refreshed
    indexmon.refresh()