def _demote_back_to_testing(fw):
    """Demote *fw* back to the 'testing' remote after failure reports.

    Emails the uploading user (if they opted in to demotion notices), marks
    the firmware and both affected remotes dirty, queues asynchronous
    metadata regeneration for each, and records a FirmwareEvent attributed
    to the server-admin user.
    """
    # from the server admin
    # NOTE(review): the username literal looks redacted in this source
    # ('*****@*****.**') — confirm the real admin address before deploying
    user = db.session.query(User).filter(User.username == '*****@*****.**').first()
    if not user:
        return
    # send email to uploading user
    if fw.user.get_action('notify-demote-failures'):
        send_email("[LVFS] Firmware has been demoted",
                   fw.user.email_address,
                   render_template('email-firmware-demote.txt',
                                   user=fw.user, fw=fw))
    fw.mark_dirty()
    remote = db.session.query(Remote).filter(Remote.name == 'testing').first()
    remote.is_dirty = True
    # asynchronously sign straight away, even public remotes
    # (the set() de-duplicates when fw.remote already is the testing remote)
    for r in set([remote, fw.remote]):
        r.is_dirty = True
        _async_regenerate_remote.apply_async(args=(r.remote_id,),
                                             queue='metadata',
                                             countdown=1)
    fw.remote_id = remote.remote_id
    fw.events.append(FirmwareEvent(remote_id=fw.remote_id, user_id=user.user_id))
    db.session.commit()
    _event_log('Demoted firmware {} as reported success {}%'.format(fw.firmware_id, fw.success))
def route_login_oauth_authorized(plugin_id):
    """OAuth provider callback: log in (or auto-create) the user.

    Looks up the authenticating plugin for *plugin_id*, fetches the OAuth
    profile data, auto-creates an account when the vendor allows it, and
    rejects disabled or non-OAuth accounts.  On success the user is logged
    in and redirected to the dashboard.
    """
    # find the plugin that can authenticate us
    p = ploader.get_by_id(plugin_id)
    if not p:
        # BUGFIX: the error response was built but never returned, so the
        # handler silently fell through with p set to None
        return _error_internal('no plugin {}'.format(plugin_id))
    if not hasattr(p, 'oauth_get_data'):
        return _error_internal('no oauth support in plugin {}'.format(plugin_id))
    try:
        data = p.oauth_get_data()
        if 'userPrincipalName' not in data:
            return _error_internal('No userPrincipalName in profile')
    except PluginError as e:
        return _error_internal(str(e))

    # auth check
    created_account = False
    user = db.session.query(User).filter(
        User.username == data['userPrincipalName']).first()
    if not user:
        user = _create_user_for_oauth_username(data['userPrincipalName'])
        if user:
            db.session.add(user)
            db.session.commit()
            _event_log('Auto created user of type %s for vendor %s' %
                       (user.auth_type, user.vendor.group_id))
            created_account = True
    if not user:
        flash('Failed to log in: no user for %s' % data['userPrincipalName'], 'danger')
        return redirect(url_for('main.route_index'))
    if not user.auth_type:
        flash('Failed to log in: User account %s is disabled' % user.username, 'danger')
        return redirect(url_for('main.route_index'))
    if user.auth_type != 'oauth':
        flash('Failed to log in: Only some accounts can log in using OAuth', 'danger')
        return redirect(url_for('main.route_index'))

    # sync the display name from the OAuth profile
    if 'displayName' in data:
        if user.display_name != data['displayName']:
            user.display_name = data['displayName']
            db.session.commit()

    # success
    login_user(user, remember=False)
    g.user = user
    if created_account:
        flash('Logged in, and created account', 'info')
    else:
        flash('Logged in', 'info')

    # set the access time
    user.atime = datetime.datetime.utcnow()
    db.session.commit()
    return redirect(url_for('main.route_dashboard'))
def _check_firmware():
    """Ensure each firmware has its required tests, then run any pending ones.

    Tests are grouped per firmware so the cached firmware blob can be
    released as soon as all of that firmware's tests have completed.
    """
    # ensure the test has been added for the firmware type
    fws = db.session.query(Firmware).all()
    for fw in fws:
        if fw.is_deleted:
            continue
        ploader.ensure_test_for_fw(fw)
    db.session.commit()
    # make a list of all the tests that need running, grouped by firmware
    test_fws = {}
    for fw in fws:
        for test in fw.tests:
            if test.needs_running:
                # idiom: setdefault replaces the manual membership check
                test_fws.setdefault(fw, []).append(test)
    # mark all the tests as started
    for fw in test_fws:
        for test in test_fws[fw]:
            print('Marking test %s started for firmware %u...' % (test.plugin_id, fw.firmware_id))
            test.started_ts = datetime.datetime.utcnow()
    db.session.commit()
    # process each test
    for fw in test_fws:
        for test in sorted(test_fws[fw], key=_test_priority_sort_func):
            plugin = ploader.get_by_id(test.plugin_id)
            if not plugin:
                _event_log('No plugin %s' % test.plugin_id)
                test.ended_ts = datetime.datetime.utcnow()
                continue
            if not hasattr(plugin, 'run_test_on_fw'):
                _event_log('No run_test_on_fw in %s' % test.plugin_id)
                test.ended_ts = datetime.datetime.utcnow()
                continue
            try:
                print('Running test %s for firmware %s' % (test.plugin_id, fw.firmware_id))
                plugin.run_test_on_fw(test, fw)
                test.ended_ts = datetime.datetime.utcnow()
                # don't leave a failed task running
                db.session.commit()
            except Exception as e:  # pylint: disable=broad-except
                test.ended_ts = datetime.datetime.utcnow()
                test.add_fail('An exception occurred', str(e))
        # unallocate the cached blob as it's no longer needed
        fw.blob = None
    # all done
    db.session.commit()
def _user_disable_actual():
    """Disable user accounts that have been unused for over a year.

    Matches users whose atime is older than 365 days and who were notified
    of pending disablement more than 42 days (6 weeks) ago.
    """
    now = datetime.datetime.utcnow()
    for user in db.session.query(User)\
                          .filter(User.auth_type != 'disabled')\
                          .filter(User.atime < now - datetime.timedelta(days=365))\
                          .filter(User.unused_notify_ts < now - datetime.timedelta(days=42)):
        _event_log('Disabling user {} {} ({}) as unused'.format(
            user.user_id, user.username, user.display_name))
        user.auth_type = 'disabled'
        # BUGFIX: the original literal had no '{}' placeholder, so .format()
        # produced the identical username for every disabled user and the
        # second disable would collide on the unique username column;
        # include the user ID to keep usernames unique.
        # NOTE(review): the original literal appeared redacted — confirm the
        # desired prefix against the upstream source.
        user.username = 'disabled_user_{}'.format(user.user_id)
        user.display_name = 'Disabled User {}'.format(user.user_id)
    db.session.commit()
def _regenerate_and_sign_metadata(only_embargo=False):
    """Regenerate and sign metadata for every dirty, signed remote.

    :param only_embargo: when True, skip public remotes and only rebuild
                         embargoed (non-public) ones
    """
    # get list of dirty remotes
    remotes = []
    for r in db.session.query(Remote):
        if not r.is_signed:
            continue
        # fix up any remotes that are not dirty, but have firmware that is dirty
        # -- which shouldn't happen, but did...
        if not r.is_dirty:
            for fw in r.fws:
                if not fw.is_dirty:
                    continue
                print('Marking remote %s as dirty due to %u' % (r.name, fw.firmware_id))
                r.is_dirty = True
        if r.is_dirty:
            if r.is_public and only_embargo:
                continue
            remotes.append(r)
    # nothing to do
    if not remotes:
        return
    # update everything required
    for r in remotes:
        print('Updating: %s' % r.name)
    _metadata_update_targets(remotes)
    for r in remotes:
        if r.name == 'stable':
            _metadata_update_pulp()
    # sign and sync: notify plugins that each on-disk metadata file changed
    download_dir = app.config['DOWNLOAD_DIR']
    for r in remotes:
        ploader.file_modified(os.path.join(download_dir, r.filename))
    # mark as no longer dirty
    for r in remotes:
        r.is_dirty = False
    db.session.commit()
    # drop caches in other sessions
    db.session.expire_all()
    # log what we did
    for r in remotes:
        _event_log('Signed metadata %s' % r.name)
def _demote_back_to_embargo(fw):
    """Demote *fw* back to its vendor's embargo remote after failure reports.

    Emails the uploading user (when opted in), marks the firmware and the
    target remote dirty so metadata gets regenerated, and logs the demotion.
    """
    # send email to uploading user
    if fw.user.notify_demote_failures:
        send_email("[LVFS] Firmware has been demoted",
                   fw.user.email_address,
                   render_template('email-firmware-demote.txt', user=fw.user, fw=fw))
    fw.mark_dirty()
    remote = fw.vendor.remote
    remote.is_dirty = True
    fw.remote_id = remote.remote_id
    fw.events.append(FirmwareEvent(fw.remote_id))
    db.session.commit()
    # BUGFIX: str.format() does not treat '%%' as an escape (that is
    # %-formatting syntax), so the old message ended with a literal '%%';
    # a single '%' is what was intended
    _event_log('Demoted firmware {} as reported success {}%'.format(fw.firmware_id, fw.success))
def _test_run_all(tests=None):
    """Run a batch of pending tests using their registered plugins.

    :param tests: explicit list of Test rows to run; when None, the first
                  50 unstarted tests (oldest scheduled first) are selected
    """
    # make a list of the first few tests that need running
    if not tests:
        tests = db.session.query(Test)\
                          .filter(Test.started_ts == None)\
                          .order_by(Test.scheduled_ts)\
                          .limit(50).all()
    # mark all the tests as started
    for test in tests:
        print('Marking test {} started for firmware {}...'.format(
            test.plugin_id, test.fw.firmware_id))
        test.started_ts = datetime.datetime.utcnow()
    db.session.commit()
    # process each test
    for test in sorted(tests, key=_test_priority_sort_func):
        plugin = ploader.get_by_id(test.plugin_id)
        if not plugin:
            _event_log('No plugin %s' % test.plugin_id)
            test.ended_ts = datetime.datetime.utcnow()
            continue
        try:
            print('Running test {} for firmware {}'.format(
                test.plugin_id, test.fw.firmware_id))
            if hasattr(plugin, 'run_test_on_fw'):
                if hasattr(plugin, 'require_test_for_fw'):
                    # NOTE(review): this `continue` skips the test without
                    # ever setting ended_ts, leaving it marked as started
                    # forever — confirm this is intentional
                    if not plugin.require_test_for_fw(test.fw):
                        continue
                plugin.run_test_on_fw(test, test.fw)
            if hasattr(plugin, 'run_test_on_md'):
                # per-component tests run once for each md in the firmware
                for md in test.fw.mds:
                    if hasattr(plugin, 'require_test_for_md'):
                        if not plugin.require_test_for_md(md):
                            continue
                    plugin.run_test_on_md(test, md)
            test.ended_ts = datetime.datetime.utcnow()
            # don't leave a failed task running
            db.session.commit()
        except Exception as e:  # pylint: disable=broad-except
            test.ended_ts = datetime.datetime.utcnow()
            test.add_fail('An exception occurred', str(e))
    # all done
    db.session.commit()
def _regenerate_and_sign_firmware():
    """Sign every firmware archive that does not yet have a signed timestamp."""
    pending = db.session.query(Firmware).\
        filter(Firmware.signed_timestamp == None).all()
    if not pending:
        return
    # skip anything deleted; sign the rest one archive at a time
    for firmware in (fw for fw in pending if not fw.is_deleted):
        print('Signing firmware %u...' % firmware.firmware_id)
        _sign_fw(firmware)
        _event_log('Signed firmware %s' % firmware.firmware_id)
    # invalidate cached ORM state held by other sessions
    db.session.expire_all()
def route_modify(plugin_id='general'):
    """ Change details about the instance """
    # only accept form data
    if request.method != 'POST':
        return redirect(url_for('settings.route_view', plugin_id=plugin_id))

    # save new values
    settings = _get_settings()
    for key in request.form:
        if key == 'csrf_token':
            continue
        # ROBUSTNESS: ignore unknown form keys instead of raising KeyError
        # on attacker-controllable input
        if key not in settings:
            continue
        if settings[key] == request.form[key]:
            continue
        setting = db.session.query(Setting).filter(Setting.key == key).first()
        # ROBUSTNESS: the row may not exist even when the key is known
        if not setting:
            continue
        setting.value = _textarea_string_to_text(request.form[key])
        _event_log('Changed server settings %s to %s' % (key, setting.value))
    db.session.commit()
    flash('Updated settings', 'info')
    return redirect(url_for('settings.route_view', plugin_id=plugin_id), 302)
def _firmware_delete(fw):
    """Soft-delete *fw*: move its archive out of the download directory and
    reassign the firmware to the 'deleted' remote."""
    deleted_remote = db.session.query(Remote).filter(Remote.name == 'deleted').first()
    if deleted_remote is None:
        _event_log('No deleted remote')
        return
    # move file so it's no longer downloadable (kept for possible restore)
    src = os.path.join(app.config['DOWNLOAD_DIR'], fw.filename)
    if os.path.exists(src):
        shutil.move(src, os.path.join(app.config['RESTORE_DIR'], fw.filename))
    # generate next cron run
    fw.mark_dirty()
    # mark as invalid and record which user deleted it
    fw.remote_id = deleted_remote.remote_id
    fw.events.append(FirmwareEvent(fw.remote_id, g.user.user_id))
def _demote_back_to_testing(fw):
    """Demote *fw* back to the 'testing' remote after failure reports,
    notifying the uploading user and recording a FirmwareEvent from the
    server-admin account."""
    admin = db.session.query(User).filter(User.username == '*****@*****.**').first()
    if admin is None:
        return
    # notify the uploading user, if they opted in
    if fw.user.get_action('notify-demote-failures'):
        body = render_template('email-firmware-demote.txt', user=fw.user, fw=fw)
        send_email("[LVFS] Firmware has been demoted", fw.user.email_address, body)
    fw.mark_dirty()
    # retarget the firmware at the testing remote and flag it for rebuild
    testing = db.session.query(Remote).filter(Remote.name == 'testing').first()
    testing.is_dirty = True
    fw.remote_id = testing.remote_id
    fw.events.append(FirmwareEvent(remote_id=fw.remote_id, user_id=admin.user_id))
    db.session.commit()
    _event_log('Demoted firmware {} as reported success {}%'.format(fw.firmware_id, fw.success))
def _check_firmware():
    """Run every scheduled test that has not yet been started."""
    pending = db.session.query(Test).filter(Test.started_ts == None).all()
    # claim every test up front before doing any work
    for t in pending:
        print('Marking test {} started for firmware {}...'.format(
            t.plugin_id, t.fw.firmware_id))
        t.started_ts = datetime.datetime.utcnow()
    db.session.commit()
    # run each test in priority order
    for t in sorted(pending, key=_test_priority_sort_func):
        plugin = ploader.get_by_id(t.plugin_id)
        if plugin is None:
            _event_log('No plugin %s' % t.plugin_id)
            t.ended_ts = datetime.datetime.utcnow()
            continue
        if not hasattr(plugin, 'run_test_on_fw'):
            _event_log('No run_test_on_fw in %s' % t.plugin_id)
            t.ended_ts = datetime.datetime.utcnow()
            continue
        try:
            print('Running test {} for firmware {}'.format(
                t.plugin_id, t.fw.firmware_id))
            plugin.run_test_on_fw(t, t.fw)
            t.ended_ts = datetime.datetime.utcnow()
            # commit immediately so a later crash does not lose this result
            db.session.commit()
        except Exception as e:  # pylint: disable=broad-except
            t.ended_ts = datetime.datetime.utcnow()
            t.add_fail('An exception occurred', str(e))
    db.session.commit()
def _sign_fw(fw):
    """Sign the firmware archive for *fw* in place.

    Loads the on-disk .cab file, checksums and plugin-signs each payload,
    regenerates and signs each metainfo.xml, embeds a firmware.jcat catalog,
    rewrites the archive, then updates the database checksums and timestamp.

    :raises NotImplementedError: when the archive cannot be read or a
        declared payload file is missing from it
    """
    # load the .cab file
    download_dir = app.config['DOWNLOAD_DIR']
    fn = os.path.join(download_dir, fw.filename)
    try:
        with open(fn, 'rb') as f:
            cabarchive = CabArchive(f.read())
    except IOError as e:
        raise NotImplementedError('cannot read %s: %s' % (fn, str(e)))
    # create Jcat file
    jcatfile = JcatFile()
    # sign each component in the archive
    print('Signing: %s' % fn)
    for md in fw.mds:
        try:
            # create Jcat item with SHA1 and SHA256 checksum blob
            cabfile = cabarchive[md.filename_contents]
            jcatitem = jcatfile.get_item(md.filename_contents)
            jcatitem.add_blob(JcatBlobSha1(cabfile.buf))
            jcatitem.add_blob(JcatBlobSha256(cabfile.buf))
            # sign using plugins
            for blob in ploader.archive_sign(cabfile.buf):
                # add GPG only to archive for backwards compat with older fwupd
                if blob.kind == JcatBlobKind.GPG:
                    fn_blob = md.filename_contents + '.' + blob.filename_ext
                    cabarchive[fn_blob] = CabFile(blob.data)
                # add to Jcat file too
                jcatitem.add_blob(blob)
        except KeyError as _:
            raise NotImplementedError('no {} firmware found'.format(md.filename_contents))
    # rewrite the metainfo.xml file to reflect latest changes and sign it
    for md in fw.mds:
        # write new metainfo.xml file
        component = _generate_metadata_mds([md], metainfo=True)
        blob_xml = b'<?xml version="1.0" encoding="UTF-8"?>\n' + \
                   ET.tostring(component, encoding='UTF-8',
                               xml_declaration=False, pretty_print=True)
        _show_diff(cabarchive[md.filename_xml].buf, blob_xml)
        cabarchive[md.filename_xml].buf = blob_xml
        # sign it
        jcatitem = jcatfile.get_item(md.filename_xml)
        jcatitem.add_blob(JcatBlobSha1(blob_xml))
        jcatitem.add_blob(JcatBlobSha256(blob_xml))
        for blob in ploader.archive_sign(blob_xml):
            jcatitem.add_blob(blob)
    # write jcat file
    if jcatfile.items:
        cabarchive['firmware.jcat'] = CabFile(jcatfile.save())
    # overwrite old file
    cab_data = cabarchive.save()
    with open(fn, 'wb') as f:
        f.write(cab_data)
    # inform the plugin loader
    ploader.file_modified(fn)
    # update the download size
    for md in fw.mds:
        md.release_download_size = len(cab_data)
    # update the database
    fw.checksum_signed_sha1 = hashlib.sha1(cab_data).hexdigest()
    fw.checksum_signed_sha256 = hashlib.sha256(cab_data).hexdigest()
    fw.signed_timestamp = datetime.datetime.utcnow()
    db.session.commit()
    # log
    _event_log('Signed firmware %s' % fw.firmware_id)
def route_report():
    """ Upload a report """
    # only accept form data
    if request.method != 'POST':
        return _json_error('only POST supported')

    # parse both content types, either application/json or multipart/form-data
    signature = None
    if request.data:
        payload = request.data.decode('utf8')
    elif request.form:
        data = request.form.to_dict()
        if 'payload' not in data:
            return _json_error('No payload in multipart/form-data')
        payload = data['payload']
        if 'signature' in data:
            signature = data['signature']
    else:
        return _json_error('No data')

    # find user and verify
    crt = None
    if signature:
        try:
            info = _pkcs7_signature_info(signature, check_rc=False)
        except IOError as e:
            return _json_error('Signature invalid: %s' % str(e))
        if 'serial' not in info:
            return _json_error('Signature invalid, no signature')
        crt = db.session.query(Certificate).filter(
            Certificate.serial == info['serial']).first()
        if crt:
            try:
                _pkcs7_signature_verify(crt, payload, signature)
            except IOError as _:
                return _json_error('Signature did not validate')

    # parse JSON data
    try:
        item = json.loads(payload)
    except ValueError as e:
        return _json_error('No JSON object could be decoded: ' + str(e))

    # check we got enough data
    for key in ['ReportVersion', 'MachineId', 'Reports', 'Metadata']:
        if not key in item:
            return _json_error('invalid data, expected %s' % key)
        if item[key] is None:
            return _json_error('missing data, expected %s' % key)

    # parse only this version
    if item['ReportVersion'] != 2:
        return _json_error('report version not supported')

    # add each firmware report
    machine_id = item['MachineId']
    reports = item['Reports']
    if len(reports) == 0:
        return _json_error('no reports included')
    metadata = item['Metadata']
    if len(metadata) == 0:
        return _json_error('no metadata included')

    msgs = []
    uris = []
    for report in reports:
        for key in ['Checksum', 'UpdateState', 'Metadata']:
            if not key in report:
                return _json_error('invalid data, expected %s' % key)
            if report[key] is None:
                return _json_error('missing data, expected %s' % key)

        # flatten the report including the per-machine and per-report metadata
        # BUGFIX: copy the per-machine metadata dict; the old `data = metadata`
        # aliased it, so keys written for one report leaked into every later
        # report in the same payload (and mutated item['Metadata'])
        data = dict(metadata)
        for key in report:
            # don't store some data (duplicates removed from the old list)
            if key in ['Created', 'Modified', 'BootTime',
                       'UpdateState', 'DeviceId', 'Checksum']:
                continue
            if key == 'Metadata':
                md = report[key]
                for md_key in md:
                    data[md_key] = md[md_key]
                continue
            # allow array of strings for any of the keys
            if isinstance(report[key], list):
                data[key] = ','.join(report[key])
            else:
                data[key] = report[key]

        # try to find the checksum (which might not exist on this server)
        fw = db.session.query(Firmware).filter(
            Firmware.checksum_signed_sha1 == report['Checksum']).first()
        if not fw:
            fw = db.session.query(Firmware).filter(
                Firmware.checksum_signed_sha256 == report['Checksum']).first()
        if not fw:
            msgs.append('%s did not match any known firmware archive' % report['Checksum'])
            continue

        # cannot report this failure
        if fw.do_not_track:
            msgs.append('%s will not accept reports' % report['Checksum'])
            continue

        # update the device checksums if there is only one component
        if crt and crt.user.check_acl('@qa') and 'ChecksumDevice' in data and len(fw.mds) == 1:
            md = fw.md_prio
            found = False
            # fwupd v1.2.6 sends an array of strings, before that just a string
            checksums = data['ChecksumDevice']
            if not isinstance(checksums, list):
                checksums = [checksums]
            # does the submitted checksum already exist as a device checksum
            for checksum in checksums:
                for csum in md.device_checksums:
                    if csum.value == checksum:
                        found = True
                        break
                # NOTE(review): `found` is never reset per checksum, so one
                # match skips all remaining checksums — preserved as-is
                if found:
                    continue
                _event_log('added device checksum %s to firmware %s' %
                           (checksum, md.fw.checksum_upload_sha1))
                if _is_sha1(checksum):
                    md.device_checksums.append(Checksum(checksum, 'SHA1'))
                elif _is_sha256(checksum):
                    md.device_checksums.append(Checksum(checksum, 'SHA256'))

        # find any matching report issue for a failed update (state 3)
        issue_id = 0
        if report['UpdateState'] == 3:
            issue = _find_issue_for_report_data(data, fw)
            if issue:
                issue_id = issue.issue_id
                msgs.append('The failure is a known issue')
                uris.append(issue.url)

        # update any old report
        r = db.session.query(Report).\
            filter(Report.checksum == report['Checksum']).\
            filter(Report.machine_id == machine_id).first()
        if r:
            msgs.append('%s replaces old report' % report['Checksum'])
            r.state = report['UpdateState']
            for e in r.attributes:
                db.session.delete(e)
        else:
            # save a new report in the database
            r = Report(machine_id=machine_id,
                       firmware_id=fw.firmware_id,
                       issue_id=issue_id,
                       state=report['UpdateState'],
                       checksum=report['Checksum'])

        # update the firmware so that the QA user does not have to wait 24h
        if r.state == 2:
            fw.report_success_cnt += 1
        elif r.state == 3:
            if r.issue_id:
                fw.report_issue_cnt += 1
            else:
                fw.report_failure_cnt += 1

        # update the LVFS user
        if crt:
            r.user_id = crt.user_id

        # save all the report entries
        for key in data:
            r.attributes.append(ReportAttribute(key=key, value=data[key]))
        db.session.add(r)

    # all done
    db.session.commit()

    # put messages and URIs on one line
    return _json_success(msg='; '.join(msgs) if msgs else None,
                         uri='; '.join(uris) if uris else None)
def _regenerate_and_sign_metadata(only_embargo=False):
    """Regenerate, sign and publish metadata for every dirty, signed remote.

    Writes both the versioned metadata file and the 'newest' alias, builds a
    .jcat catalog of checksums and plugin signatures alongside each, and
    prunes all but the last 6 metadata builds per remote.

    :param only_embargo: when True, skip public remotes and only rebuild
                         embargoed (non-public) ones
    """
    # get list of dirty remotes
    remotes = []
    for r in db.session.query(Remote):
        if not r.is_signed:
            continue
        # fix up any remotes that are not dirty, but have firmware that is dirty
        # -- which shouldn't happen, but did...
        if not r.is_dirty:
            for fw in r.fws:
                if not fw.is_dirty:
                    continue
                print('Marking remote %s as dirty due to %u' % (r.name, fw.firmware_id))
                r.is_dirty = True
        if r.is_dirty:
            if r.is_public and only_embargo:
                continue
            remotes.append(r)
    # nothing to do
    if not remotes:
        return
    # set destination path from app config
    download_dir = app.config['DOWNLOAD_DIR']
    if not os.path.exists(download_dir):
        os.mkdir(download_dir)
    # update everything required
    invalid_fns = []
    for r in remotes:
        print('Updating: %s' % r.name)
    for r, blob_xmlgz in _metadata_update_targets(remotes):
        # write metadata-?????.xml.gz
        fn_xmlgz = os.path.join(download_dir, r.filename)
        with open(fn_xmlgz, 'wb') as f:
            f.write(blob_xmlgz)
        invalid_fns.append(fn_xmlgz)
        # write metadata.xml.gz
        fn_xmlgz = os.path.join(download_dir, r.filename_newest)
        with open(fn_xmlgz, 'wb') as f:
            f.write(blob_xmlgz)
        invalid_fns.append(fn_xmlgz)
        # create Jcat item with SHA256 checksum blob
        jcatfile = JcatFile()
        jcatitem = jcatfile.get_item(r.filename)
        jcatitem.add_alias_id(r.filename_newest)
        jcatitem.add_blob(JcatBlobSha1(blob_xmlgz))
        jcatitem.add_blob(JcatBlobSha256(blob_xmlgz))
        # write each signed file
        for blob in ploader.metadata_sign(blob_xmlgz):
            # add GPG only to archive for backwards compat with older fwupd
            if blob.kind == JcatBlobKind.GPG:
                fn_xmlgz_asc = fn_xmlgz + '.' + blob.filename_ext
                with open(fn_xmlgz_asc, 'wb') as f:
                    f.write(blob.data)
                invalid_fns.append(fn_xmlgz_asc)
            # add to Jcat file too
            jcatitem.add_blob(blob)
        # write jcat file
        fn_xmlgz_jcat = fn_xmlgz + '.jcat'
        with open(fn_xmlgz_jcat, 'wb') as f:
            f.write(jcatfile.save())
        invalid_fns.append(fn_xmlgz_jcat)
    # update PULP
    for r in remotes:
        if r.name == 'stable':
            _metadata_update_pulp(download_dir)
    # do this all at once right at the end of all the I/O
    for fn in invalid_fns:
        print('Invalidating {}'.format(fn))
        ploader.file_modified(fn)
    # mark as no longer dirty
    for r in remotes:
        if not r.build_cnt:
            r.build_cnt = 0
        r.build_cnt += 1
        r.is_dirty = False
    db.session.commit()
    # drop caches in other sessions
    db.session.expire_all()
    # log what we did
    for r in remotes:
        _event_log('Signed metadata {} build {}'.format(r.name, r.build_cnt))
    # only keep the last 6 metadata builds (24h / stable refresh every 4h)
    for r in remotes:
        if not r.filename:
            continue
        suffix = r.filename.split('-')[2]
        fns = glob.glob(
            os.path.join(download_dir, 'firmware-*-{}'.format(suffix)))
        for fn in sorted(fns):
            # NOTE(review): splits the full path on '-', which assumes
            # download_dir itself contains no '-' — confirm
            build_cnt = int(fn.split('-')[1])
            if build_cnt + 6 > r.build_cnt:
                continue
            os.remove(fn)
            _event_log('Deleted metadata {} build {}'.format(
                r.name, build_cnt))
def _regenerate_and_sign_metadata_remote(r):
    """Regenerate, sign and publish the metadata for a single remote *r*.

    Builds the filtered firmware list for the remote, writes the versioned
    and 'newest' metadata files plus a .jcat catalog, notifies plugins of
    the changed files, prunes old builds, and clears the dirty flags.
    """
    # not required
    if not r.is_signed:
        return
    # fix up any remotes that are not dirty, but have firmware that is dirty
    # -- which shouldn't happen, but did...
    if not r.is_dirty:
        for fw in r.fws:
            if not fw.is_dirty:
                continue
            print('Marking remote %s as dirty due to %u' % (r.name, fw.firmware_id))
            r.is_dirty = True
            fw.is_dirty = False
    # not needed
    if not r.is_dirty:
        return
    # set destination path from app config
    download_dir = app.config['DOWNLOAD_DIR']
    if not os.path.exists(download_dir):
        os.mkdir(download_dir)
    invalid_fns = []
    print('Updating: %s' % r.name)
    # create metadata for each remote
    fws_filtered = []
    for fw in db.session.query(Firmware):
        if fw.remote.name in ['private', 'deleted']:
            continue
        if not fw.signed_timestamp:
            continue
        if r.check_fw(fw):
            fws_filtered.append(fw)
    settings = _get_settings()
    blob_xmlgz = _generate_metadata_kind(fws_filtered,
                                         firmware_baseuri=settings['firmware_baseuri'])
    # write metadata-?????.xml.gz
    fn_xmlgz = os.path.join(download_dir, r.filename)
    with open(fn_xmlgz, 'wb') as f:
        f.write(blob_xmlgz)
    invalid_fns.append(fn_xmlgz)
    # write metadata.xml.gz
    fn_xmlgz = os.path.join(download_dir, r.filename_newest)
    with open(fn_xmlgz, 'wb') as f:
        f.write(blob_xmlgz)
    invalid_fns.append(fn_xmlgz)
    # create Jcat item with SHA256 checksum blob
    jcatfile = JcatFile()
    jcatitem = jcatfile.get_item(r.filename)
    jcatitem.add_alias_id(r.filename_newest)
    jcatitem.add_blob(JcatBlobSha1(blob_xmlgz))
    jcatitem.add_blob(JcatBlobSha256(blob_xmlgz))
    # write each signed file
    for blob in ploader.metadata_sign(blob_xmlgz):
        # add GPG only to archive for backwards compat with older fwupd
        if blob.kind == JcatBlobKind.GPG:
            fn_xmlgz_asc = fn_xmlgz + '.' + blob.filename_ext
            with open(fn_xmlgz_asc, 'wb') as f:
                f.write(blob.data)
            invalid_fns.append(fn_xmlgz_asc)
        # add to Jcat file too
        jcatitem.add_blob(blob)
    # write jcat file
    fn_xmlgz_jcat = fn_xmlgz + '.jcat'
    with open(fn_xmlgz_jcat, 'wb') as f:
        f.write(jcatfile.save())
    invalid_fns.append(fn_xmlgz_jcat)
    # update PULP
    if r.name == 'stable':
        _metadata_update_pulp(download_dir)
    # do this all at once right at the end of all the I/O
    for fn in invalid_fns:
        print('Invalidating {}'.format(fn))
        ploader.file_modified(fn)
    # mark as no longer dirty
    if not r.build_cnt:
        r.build_cnt = 0
    r.build_cnt += 1
    r.is_dirty = False
    # log what we did
    _event_log('Signed metadata {} build {}'.format(r.name, r.build_cnt))
    # only keep the last 6 metadata builds (24h / stable refresh every 4h)
    suffix = r.filename.split('-')[2]
    fns = glob.glob(os.path.join(download_dir, 'firmware-*-{}'.format(suffix)))
    for fn in sorted(fns):
        # NOTE(review): splits the full path on '-', which assumes
        # download_dir itself contains no '-' — confirm
        build_cnt = int(fn.split('-')[1])
        if build_cnt + 6 > r.build_cnt:
            continue
        os.remove(fn)
        _event_log('Deleted metadata {} build {}'.format(r.name, build_cnt))
    # all firmwares are contained in the correct metadata now
    for fw in fws_filtered:
        fw.is_dirty = False
    db.session.commit()
def flash_save_eventlog(unused_sender, message, category, **unused_extra):
    """Mirror every flashed UI message into the server event log."""
    # warnings and errors are flagged as important in the log
    _event_log(str(message), category in ['danger', 'warning'])