def archive_finalize(self, arc, metadata):
    """Add a templated README file to the archive, if the plugin is enabled.

    Reads the configured template file, substitutes every metadata key with
    its value, and adds the result to the archive under the configured
    filename. Does nothing if the plugin is disabled or the file already
    exists in the archive.

    Raises PluginError if the filename or template settings are missing,
    or if the template file cannot be read.
    """
    # get settings
    settings = _get_settings('info_readme')
    if settings['info_readme_enable'] != 'enabled':
        return None
    if not settings['info_readme_filename']:
        raise PluginError('No filename set')
    if not settings['info_readme_template']:
        raise PluginError('No template set')

    # does the readme file already exist?
    if _archive_get_files_from_glob(arc, settings['info_readme_filename']):
        print("archive already has %s" % settings['info_readme_filename'])
        return

    # read in the file and do substitutions
    # BUG FIX: the template was opened in binary mode ('rb'), which returns
    # bytes -- bytes.replace() with str arguments raises TypeError, and bytes
    # has no .encode(). Open in text mode, and close the handle with 'with'.
    try:
        with open(settings['info_readme_template'], 'r') as f:
            template = f.read()
    except IOError as e:
        raise PluginError(e)
    for key in metadata:
        template = template.replace(key, metadata[key])

    # add it to the archive
    _archive_add(arc, settings['info_readme_filename'], template.encode('utf-8'))
def file_modified(self, fn):
    """Upload a modified file to the configured S3 bucket, if whitelisted.

    Skips silently when the plugin is disabled or the file's basename is not
    in the comma-separated cdn_sync_files whitelist. Raises PluginError when
    the whitelist or bucket is unset, or when the upload fails.
    """
    # is the file in the whitelist
    settings = _get_settings('cdn_sync')
    if settings['cdn_sync_enable'] != 'enabled':
        return
    fns = settings['cdn_sync_files']
    if not fns:
        raise PluginError('No file whitelist set')
    basename = os.path.basename(fn)
    if basename not in fns.split(','):
        print('%s not in %s' % (basename, fns))
        return

    # bucket not set
    if not settings['cdn_sync_bucket']:
        raise PluginError('No bucket set')

    # upload
    try:
        key = os.path.join(settings['cdn_sync_folder'], os.path.basename(fn))
        session = boto3.Session(
            aws_access_key_id=settings['cdn_sync_username'],
            aws_secret_access_key=settings['cdn_sync_password'],
            region_name=settings['cdn_sync_region'])
        s3 = session.resource('s3')
        bucket = s3.Bucket(settings['cdn_sync_bucket'])
        bucket.Acl().put(ACL='public-read')
        print("uploading %s as %s" % (fn, key))
        # BUG FIX: the file handle was leaked; read it inside a 'with' block
        with open(fn, 'rb') as f:
            blob = f.read()
        obj = bucket.put_object(Key=key, Body=blob)
        obj.Acl().put(ACL='public-read')
    # BUG FIX: 'except BaseException' also swallowed KeyboardInterrupt and
    # SystemExit; Exception is broad enough for boto3/IO failures
    except Exception as e:
        raise PluginError(e)
def archive_copy(self, arc, firmware_cff):
    """Copy the .inf and/or .cat payload into the archive.

    Each file type is copied only when its corresponding wu_copy setting
    is set to 'enabled'.
    """
    settings = _get_settings('wu_copy')
    fn = _get_basename_safe(firmware_cff.get_name())
    # map each supported extension to the setting that gates it
    for suffix, setting_key in (('.inf', 'wu_copy_inf'), ('.cat', 'wu_copy_cat')):
        if fn.endswith(suffix) and settings[setting_key] == 'enabled':
            _archive_add(arc, fn, firmware_cff.get_bytes().get_data())
def _metadata_modified(self, fn):
    """Write a detached sigul signature (.asc) next to a modified metadata file.

    Does nothing when the plugin is disabled. Notifies the plugin loader
    about the newly-created .asc file so other plugins can react to it.
    """
    # plugin not enabled
    settings = _get_settings('sign_sigul')
    if settings['sign_sigul_enable'] != 'enabled':
        return

    # generate
    # BUG FIX: the read handle was leaked; use a context manager
    with open(fn, 'rb') as f:
        blob = f.read()
    blob_asc = _sigul_detached_sign_data(blob,
                                         settings['sign_sigul_config_file'],
                                         settings['sign_sigul_metadata_key'])
    fn_asc = fn + '.asc'
    with open(fn_asc, 'w') as f:
        f.write(blob_asc)

    # inform the plugin loader
    ploader.file_modified(fn_asc)
def archive_sign(self, arc, firmware_cff):
    """Add a detached sigul signature for the firmware file to the archive.

    No-op when the plugin is disabled or when a matching .asc file is
    already present in the archive.
    """
    # plugin not enabled
    settings = _get_settings('sign_sigul')
    if settings['sign_sigul_enable'] != 'enabled':
        return

    # already signed?
    detached_fn = _get_basename_safe(firmware_cff.get_name() + '.asc')
    if _archive_get_files_from_glob(arc, detached_fn):
        return

    # create the detached signature and add it to the archive
    payload = firmware_cff.get_bytes().get_data()
    signature = _sigul_detached_sign_data(payload,
                                          settings['sign_sigul_config_file'],
                                          settings['sign_sigul_firmware_key'])
    _archive_add(arc, detached_fn, signature.encode('utf-8'))
def _sign_blob(self, contents):
    """Return a detached PKCS#7 signature for *contents* using certtool.

    Returns None when the plugin is disabled. Raises PluginError when the
    private key or certificate settings are missing, or when certtool fails.
    """
    # get settings
    settings = _get_settings('sign_pkcs7')
    if settings['sign_pkcs7_enable'] != 'enabled':
        return None
    if not settings['sign_pkcs7_privkey']:
        raise PluginError('No private key set')
    if not settings['sign_pkcs7_certificate']:
        raise PluginError('No certificate set')

    # BUG FIX: temp files were left for the GC to close; use 'with' so both
    # are deleted deterministically, even when certtool fails
    with tempfile.NamedTemporaryFile(mode='wb', prefix='pkcs7_', suffix=".bin",
                                     dir=None, delete=True) as src, \
         tempfile.NamedTemporaryFile(mode='wb', prefix='pkcs7_', suffix=".p7b",
                                     dir=None, delete=True) as dst:
        # write firmware to temp file
        src.write(contents)
        src.flush()

        # sign
        argv = ['certtool', '--p7-detached-sign', '--p7-time',
                '--load-privkey', settings['sign_pkcs7_privkey'],
                '--load-certificate', settings['sign_pkcs7_certificate'],
                '--infile', src.name,
                '--outfile', dst.name]
        ps = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # BUG FIX: wait() followed by stderr.read() can deadlock when the
        # child fills the pipe; communicate() drains both pipes safely.
        # Also decode stderr so the error message is not "b'...'"
        _, stderr = ps.communicate()
        if ps.returncode != 0:
            raise PluginError('Failed to sign: %s' % stderr.decode(errors='replace'))

        # read back the temp file (BUG FIX: handle was leaked before)
        with open(dst.name, 'rb') as f:
            return f.read()
def file_modified(self, fn):
    """Purge a modified file from the CDN cache, if it matches the whitelist.

    Issues the configured HTTP request against the purge URI and inspects
    the JSON response (when any) for a non-ok status. Raises PluginError on
    missing settings or on a failed purge.
    """
    # is the file in the whitelist
    settings = _get_settings('cdn_purge')
    if settings['cdn_purge_enable'] != 'enabled':
        return
    fns = settings['cdn_purge_files']
    if not fns:
        raise PluginError('No file whitelist set')
    basename = os.path.basename(fn)
    if not _basename_matches_globs(basename, fns.split(',')):
        print('%s not in %s' % (basename, fns))
        return

    # required settings
    if not settings['cdn_purge_uri']:
        raise PluginError('No URI set')
    if not settings['cdn_purge_method']:
        raise PluginError('No request method set')

    # issue the purge request, attaching the access key only when configured
    headers = {}
    if settings['cdn_purge_accesskey']:
        headers['AccessKey'] = settings['cdn_purge_accesskey']
    r = requests.request(settings['cdn_purge_method'],
                         settings['cdn_purge_uri'] + basename,
                         headers=headers)
    if not r.text:
        return
    try:
        response = json.loads(r.text)
        if response['status'] != 'ok':
            raise PluginError('Failed to purge metadata on CDN: ' + r.text)
    except ValueError as e:
        # BunnyCDN doesn't send a JSON blob
        raise PluginError('Failed to purge metadata on CDN: %s: %s' % (r.text, str(e)))
def _metadata_modified(self, fn):
    """Write a detached GPG signature (.asc) next to a modified metadata file.

    Does nothing when the plugin is disabled or when no Affidavit can be
    created. Raises PluginError when the keyring directory or signing UID
    settings are missing. Notifies the plugin loader about the new file.
    """
    # plugin not enabled
    settings = _get_settings('sign_gpg')
    if settings['sign_gpg_enable'] != 'enabled':
        return

    # generate
    if not settings['sign_gpg_keyring_dir']:
        raise PluginError('No keyring directory set')
    if not settings['sign_gpg_metadata_uid']:
        raise PluginError('No metadata signing UID set')
    affidavit = Affidavit(settings['sign_gpg_metadata_uid'],
                          settings['sign_gpg_keyring_dir'])
    if not affidavit:
        return
    # BUG FIX: the read handle was leaked; use a context manager
    with open(fn, 'rb') as f:
        blob = f.read()
    blob_asc = affidavit.create(blob)
    fn_asc = fn + '.asc'
    with open(fn_asc, 'w') as f:
        f.write(blob_asc)

    # inform the plugin loader
    ploader.file_modified(fn_asc)
def archive_sign(self, arc, firmware_cff):
    """Add a detached GPG signature for the firmware file to the archive.

    No-op when the plugin is disabled or when a matching .asc file already
    exists in the archive. Raises PluginError when the keyring directory or
    firmware signing UID settings are missing.
    """
    # plugin not enabled
    settings = _get_settings('sign_gpg')
    if settings['sign_gpg_enable'] != 'enabled':
        return

    # already signed?
    detached_fn = _get_basename_safe(firmware_cff.get_name() + '.asc')
    if _archive_get_files_from_glob(arc, detached_fn):
        return

    # check the settings required for signing
    if not settings['sign_gpg_keyring_dir']:
        raise PluginError('No keyring directory set')
    if not settings['sign_gpg_firmware_uid']:
        raise PluginError('No firmware signing UID set')

    # create the detached signature and add it to the archive
    affidavit = Affidavit(settings['sign_gpg_firmware_uid'],
                          settings['sign_gpg_keyring_dir'])
    signature = affidavit.create(firmware_cff.get_bytes().get_data())
    _archive_add(arc, detached_fn, signature.encode('utf-8'))
def file_modified(self, fn):
    """Purge a modified file from the CDN cache, if it matches the whitelist.

    Issues a GET against the purge URI; any response body is treated as a
    failure. Raises PluginError on missing settings or on a failed purge.
    """
    # is the file in the whitelist
    settings = _get_settings('cdn_purge')
    if settings['cdn_purge_enable'] != 'enabled':
        return
    fns = settings['cdn_purge_files']
    if not fns:
        raise PluginError('No file whitelist set')
    basename = os.path.basename(fn)
    if not _basename_matches_globs(basename, fns.split(',')):
        print('%s not in %s' % (basename, fns))
        return

    # URI not set
    if not settings['cdn_purge_uri']:
        raise PluginError('No URI set')

    # purge
    url = settings['cdn_purge_uri'] + basename
    # BUG FIX: the AccessKey header was always sent, even when the setting
    # is empty/None; guard it like the other cdn_purge handler does
    headers = {}
    if settings['cdn_purge_accesskey']:
        headers['AccessKey'] = settings['cdn_purge_accesskey']
    r = requests.get(url, headers=headers)
    # NOTE(review): any response body is treated as an error here — presumably
    # this CDN returns an empty body on success; confirm against the provider
    if r.text:
        raise PluginError('Failed to purge metadata on CDN: ' + r.text)
# # Copyright (C) 2018 Richard Hughes <*****@*****.**> # Licensed under the GNU General Public License Version 2 # # pylint: disable=no-self-use,no-member,unexpected-keyword-arg import uuid from flask import session, request from flask_oauthlib.client import OAuth, OAuthException from app import oauth from app.pluginloader import PluginBase, PluginSettingBool, PluginSettingText, PluginError from app.util import _get_settings settings = _get_settings('auth_azure') if 'auth_azure_consumer_key' in settings and settings['auth_azure_consumer_key']: remote_app = oauth.remote_app( 'microsoft', consumer_key=settings['auth_azure_consumer_key'], consumer_secret=settings['auth_azure_consumer_secret'], request_token_params={'scope': 'offline_access User.Read'}, base_url='https://graph.microsoft.com/v1.0/', request_token_url=None, access_token_method='POST', access_token_url='https://login.microsoftonline.com/common/oauth2/v2.0/token', authorize_url='https://login.microsoftonline.com/common/oauth2/v2.0/authorize' ) @remote_app.tokengetter def get_auth_azure_token():