def update(group, since):
    """
    Returns all items updated past a specific date in utc.

    :Parameters:
       - `since`: a specific date in utc
       - `group`: group to limit items to
    """
    try:
        cutoff = datetime.datetime.strptime(since, "%Y-%m-%dT%H:%M:%S")
        items = Hash.objects(date__gt=cutoff, group=group)
        requested = request.args.get('fields', None)
        if requested is None:
            # No explicit field list requested; fall back to the defaults.
            fields = API_UPDATES_DEFAULT_FIELDS
        else:
            # Map the comma separated request names to model field names.
            fields = [
                Hash.modelname(name)
                for name in requested.replace(' ', '').split(',')
            ]
        items = items.only(*fields)
        return stream_items(items, fields)
    except Exception as e:
        current_app.logger.debug(e)
        return error()
def update_for_group(group, since):
    """
    Returns all items updated past a specific date in utc.

    :Parameters:
       - `since`: a specific date in utc
       - `group`: group to limit items to
    """
    try:
        cutoff = datetime.datetime.strptime(since, "%Y-%m-%dT%H:%M:%S")
        items = Hash.objects(date__gt=cutoff, group=group)
        fields = None
        requested = request.args.get('fields', None)
        if requested:
            # Keep only the request names that map to a real model field.
            fields = [
                mapped
                for mapped in (
                    Hash.fieldname(name)
                    for name in requested.replace(' ', '').split(','))
                if mapped
            ]
            items = items.only(*fields)
        return stream_items(items, fields)
    except Exception as e:
        current_app.logger.debug(e)
        return error()
def hash_submission(submission_id):
    """
    Helper method to process an archive at source where possible
    from a submission.

    Runs the group-specific hashing command over the submission's source
    archive and creates one Hash entry (with its own Submission record)
    per line of JSON the command prints.

    :Parameters:
       - `submission_id`: id of the Submission to process
    """
    submission = Submission.objects(id=submission_id).first()
    if not submission:
        config.LOGGER.debug('Submission %s not found.' % (submission_id))
        return
    # Fixed idiom: `not x is None` -> `x is not None` (PEP 8).
    if submission.entry is not None:
        # Fixed typo in the stored comment ('alread exits').
        submission.add_comment('Entry already exists. Skipping hashing.')
        return
    if not isfile(submission.source):
        submission.add_comment('Source file not found.')
        return
    if submission.group not in config.HASHING_COMMANDS:
        submission.add_comment('Hashing command for this group not found.')
        return

    # NOTE(review): shell=True with an interpolated path is command-injection
    # prone if submission.source can be influenced by the uploader -- confirm
    # the source path is generated server-side.
    command = config.HASHING_COMMANDS[submission.group].format(
        archive=submission.source)
    try:
        # NOTE(review): assumes check_output returns text; on Python 3 this
        # is bytes unless the command is run with text/universal_newlines --
        # confirm against the runtime in use.
        output = check_output(command, shell=True).strip()
        count = 0
        for line in output.split('\n'):
            json_data = loads(line)
            json_data['cves'] = submission.cves
            # Normalize metadata to a list of dicts.
            meta = json_data.get('metadata', [])
            if isinstance(meta, dict):
                meta = [meta]
            json_data['metadata'] = meta

            entry = Hash()
            entry.mongify(json_data)
            entry.status = 'SUBMITTED'
            entry.submitter = submission.submitter
            if count > 0:
                # create a new submission for each embedded entry
                s = deepcopy(submission)
                s.id = None
            else:
                s = submission
            s.entry = entry
            s.approval = 'PENDING_APPROVAL'
            s.validate()
            s.save()
            s.add_comment('Auto hash entry added')
            count += 1
        # we are done safely, now remove the source
        submission.remove_source_file()
    except CalledProcessError as e:
        submission.add_comment(e)
        config.LOGGER.debug('Command execution failed for "%s"' % (command))
    except Exception as e:
        submission.add_comment(e)
        # str(e) instead of e.message: exceptions have no .message attribute
        # on Python 3; .warning instead of the deprecated .warn alias.
        config.LOGGER.warning('Failed to hash: ' + str(e))
def get_hash_entry(self):
    """
    Build a new Hash entry for the ruby group from this plugin's CVE data.

    :Returns: a Hash with group, submitter and CVE ids populated.
    """
    entry = Hash()
    entry.group = 'ruby'
    entry.submitter = 'plugin.rubysec'
    # self.cve is a comma separated list of bare CVE numbers; prefix each.
    cve_ids = self.cve.strip().split(',')
    entry.append_cves(['CVE-%s' % (item) for item in cve_ids])
    return entry
def hashes(format=None):
    """
    Render all RELEASED hashes, optionally filtered to a single group.

    :Parameters:
       - `format`: unused legacy parameter (kept for route compatibility)
    """
    hashes = Hash.objects(status='RELEASED')
    group = request.args.get('group', 'all')
    if group != 'all':
        # Bug fix: the membership test previously checked `format` instead of
        # the requested `group`, so a valid group filter was rejected (and an
        # invalid one could slip through) whenever format did not match.
        if group not in Hash.objects.distinct('group'):
            flash('Group of hashes not found', 'error')
        else:
            hashes = hashes.filter(group=group)
    return render_template('hashes.html', hashes=hashes)
def submit_hash(group):
    """
    Allows for authenticated users to submit hashes via json.

    :Parameters:
       - `group`: group the submitted hash belongs to
    """
    # Bug fix: applying % to a format string with no conversion specifier
    # raises TypeError before the try block; '%s' stringifies the user.
    user = '%s' % api_request_user()
    try:
        if group not in groups():
            raise ValueError('Invalid group specified')
        json_data = request.get_json()
        if 'cves' not in json_data:
            raise ValueError('No CVE provided')
        entry = Hash()
        entry.mongify(json_data)
        entry.submitter = user
        submit(
            user, 'json-api-hash', group,
            suffix='Hash', entry=entry, approval='PENDING_APPROVAL')
        return success()
    except ValueError as ve:
        # str(ve) instead of ve.message: exceptions have no .message
        # attribute on Python 3.
        return error(str(ve))
    except Exception as e:
        current_app.logger.info('Invalid submission by %s' % (user))
        current_app.logger.debug(e)
        return error()
def update(revision):
    """
    Return legacy (v1) update records for entries at or above a db revision.

    :Parameters:
       - `revision`: minimum _v1.db_version to include (string or int)
    :Returns: a JSON response of [{'fields': {...}}, ...], or error() on
              any failure (bad revision, missing document fields, ...).
    """
    try:
        revision = int(revision)
        result = []
        # revision is already an int; the original converted it twice.
        for item in Hash.objects(_v1__db_version__gte=revision):
            newitem = {
                'name': item['name'],
                'vendor': item['vendor'],
                'status': 'In Database',
                'format': item['format'].upper(),
                'version': item['version'],
                # The original assigned submitter twice; the str() form was
                # the one that won, so only it is kept.
                'submitter': str(item['submitter']),
                'hash': item['hashes']['sha512']['combined'],
                'db_version': int(item['_v1']['db_version']),
                'cves': ','.join(item.cve_list()),
            }
            result.append({'fields': newitem})
        return make_response(json.dumps(result))
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # propagate instead of being swallowed.
        return error()
def update_front_page_stats():
    """
    Recompute the cached per-group counts shown on the front page.

    Stores {'groups': [...], 'stats': {group: {hashes, submitted,
    pending}}} on _CONFIG.front_page_stats.
    """
    stats = {}
    stats['hashes'] = Hash.objects(status='RELEASED').only('group')
    stats['submitted'] = Submission.objects(
        approval='REQUESTED').only('group')
    stats['pending'] = Submission.objects(
        approval='PENDING_APPROVAL').only('group')
    # Generate counts for objects and for each format
    # data will contain hashes, hashes_jars, hashes_eggs etc.
    # Bug fix: dict.keys() is a view on Python 3 and has no .sort();
    # sorted() works on both Python 2 and 3.
    groups = sorted(SUBMISSION_GROUPS.keys())
    data = {'groups': groups, 'stats': {}}
    for group in groups:
        stat = {}
        for key in stats:
            if group == 'all':
                stat[key] = len(stats[key])
            else:
                stat[key] = len(stats[key].filter(group=group))
        data['stats'][group] = stat
    _CONFIG.front_page_stats = data
def hashes(groups):
    """
    Render the RELEASED hashes belonging to any of the given groups.

    :Parameters:
       - `groups`: iterable of group names to include
    """
    released = Hash.objects(status='RELEASED', group__in=groups)
    released = released.only('name', 'version', 'hashes.sha512.combined')
    return render_template('hashes.html', hashes=released)
def getInfoFromDb(rowId):
    """
    Look up a single Hash document by its id.

    :Parameters:
       - `rowId`: primary key of the Hash to fetch
    :Returns: the matching Hash, or None when no document has that id.
    """
    matches = Hash.objects(id=rowId)
    return matches.first()