def build_occplug():
    """Build the occPlug jEdit plugin and patch ubuntu.props into its jar.

    Side effects: rebases TEMP under an occPlug-specific scratch dir,
    downloads the ErrorList plugin dependency, runs the ant build into
    DEST_OCCPLUG_JARS, then updates OccPlug.jar in place.  Finally forces
    BUILD_ARCHITECTURE to 'all' — the plugin is architecture-independent.
    """
    header("BUILDING OCCPLUG")
    config.refresh()
    # Work in a dedicated scratch area so other platform builds are untouched.
    config.rebase('TEMP', '@TEMP_ROOT@/occPlug')
    remove_dir(config.get('TEMP'))
    dist_ext = '/distribution/deb-pkg'
    config.set('OCCPLUG_DISTRIBUTION', concat([config.get('SVN'), dist_ext]))
    make_destdirs()
    remove_dir(concat([config.get('TEMP'), '/', config.get('PACKAGE_NAME')]))
    print('SOURCE OCCPLUG: %s' % config.get('SOURCE_OCCPLUG'))
    # Renamed from 'zipfile' so the local no longer shadows the stdlib module.
    errorlist_zip = 'ErrorList-1.9-bin.zip'
    with pushd():
        cd(config.get('SOURCE_OCCPLUG'))
        # Fetch the ErrorList plugin occPlug builds against and unpack it
        # straight into the destination jar directory.
        cmd(build_command(['wget', config.get('ERRORLIST_URL')]))
        unzip_file_into_dir(errorlist_zip, config.get('DEST_OCCPLUG_JARS'))
        cmd(build_command([
            'ant',
            '-Djedit.install.dir=/usr/share/jedit',
            concat(['-Dinstall.dir=', config.get('DEST_OCCPLUG_JARS')]),
            concat(['-Dbuild.dir=', config.get('TEMP')]),
            concat(['-lib ', config.get('DEST_OCCPLUG_JARS')]),
        ]))
    # Insert the ubuntu.props file into the .jar
    with pushd():
        cd(config.get('OCCPLUG_DISTRIBUTION'))
        cmd(build_command([
            'jar -uf',
            concat([config.get('DEST_OCCPLUG_JARS'), '/', 'OccPlug.jar']),
            'ubuntu.props',
        ]))
    config.set('BUILD_ARCHITECTURE', 'all')
def all_arch(url):
    """Run the full build for every toolchain, then build the occPlug.

    *url* may be the literal string 'trunk', which is expanded to the
    configured SVN trunk URL before building.
    """
    config.refresh()
    if url == 'trunk':
        url = config.get('SVN_TRUNK')
    # Dispatch table replaces the if/elif chain; each entry configures the
    # platform before the full build runs.  Order is significant.
    setup_for = {
        'kroc': build_native_kroc,
        'tvm': build_native_tvm,
        'avr': build_avr,
    }
    for tool in ['kroc', 'tvm', 'avr']:
        # I think we need 386 and 686 packages to make
        # life easier for end-users.
        # Set the config for the platform, then run the full build.
        # NOTE(review): 'all' here is a sibling build function that shadows
        # the builtin — confirm it is defined elsewhere in this module.
        setup_for[tool]()
        all(url)
    build_occplug()
def build_occplug():
    """Build the occPlug jEdit plugin and patch ubuntu.props into its jar.

    Side effects: rebases TEMP under an occPlug-specific scratch dir,
    downloads the ErrorList plugin dependency, runs the ant build into
    DEST_OCCPLUG_JARS, then updates OccPlug.jar in place.  Finally forces
    BUILD_ARCHITECTURE to 'all' — the plugin is architecture-independent.
    """
    header("BUILDING OCCPLUG")
    config.refresh()
    # Work in a dedicated scratch area so other platform builds are untouched.
    config.rebase('TEMP', '@TEMP_ROOT@/occPlug')
    remove_dir(config.get('TEMP'))
    dist_ext = '/distribution/deb-pkg'
    config.set('OCCPLUG_DISTRIBUTION', concat([config.get('SVN'), dist_ext]))
    make_destdirs()
    remove_dir(concat([config.get('TEMP'), '/', config.get('PACKAGE_NAME')]))
    print('SOURCE OCCPLUG: %s' % config.get('SOURCE_OCCPLUG'))
    # Renamed from 'zipfile' so the local no longer shadows the stdlib module.
    errorlist_zip = 'ErrorList-1.9-bin.zip'
    with pushd():
        cd(config.get('SOURCE_OCCPLUG'))
        # Fetch the ErrorList plugin occPlug builds against and unpack it
        # straight into the destination jar directory.
        cmd(build_command(['wget', config.get('ERRORLIST_URL')]))
        unzip_file_into_dir(errorlist_zip, config.get('DEST_OCCPLUG_JARS'))
        cmd(build_command([
            'ant',
            '-Djedit.install.dir=/usr/share/jedit',
            concat(['-Dinstall.dir=', config.get('DEST_OCCPLUG_JARS')]),
            concat(['-Dbuild.dir=', config.get('TEMP')]),
            concat(['-lib ', config.get('DEST_OCCPLUG_JARS')]),
        ]))
    # Insert the ubuntu.props file into the .jar
    with pushd():
        cd(config.get('OCCPLUG_DISTRIBUTION'))
        cmd(build_command([
            'jar -uf',
            concat([config.get('DEST_OCCPLUG_JARS'), '/', 'OccPlug.jar']),
            'ubuntu.props',
        ]))
    config.set('BUILD_ARCHITECTURE', 'all')
def checkout(url):
    """Check out *url* into the configured SVN working directory.

    The literal string 'trunk' is expanded to the configured SVN_TRUNK URL.
    The target directory is wiped and recreated first.
    """
    header("RUNNING CHECKOUT")
    config.refresh()
    actual_url = config.get('SVN_TRUNK') if url == 'trunk' else url
    target = config.get('SVN')
    remove_and_create_dir(target)
    cmd(build_command(["svn", "co", actual_url, target]))
def setup_class(cls):
    """Load the test config and reset shared State before the test class runs."""
    from state import State
    from main import AlarmDFA
    import config
    # Use a context manager so the config file handle is closed deterministically.
    with open('tests/config.yaml') as f:
        confs = yaml.safe_load(f.read())
    State.config = confs
    State.alarms = AlarmDFA()
    State.options = {}
    State.userconf = {}
    config.refresh()
def setUpClass(cls):
    """Load the test config and reset shared State before the test class runs."""
    from state import State
    from main import AlarmDFA
    import config
    # yaml.safe_load instead of yaml.load: the bare form is unsafe on
    # untrusted input and requires an explicit Loader on PyYAML >= 6.
    # Also close the file deterministically via a context manager.
    with open('tests/config.yaml') as f:
        confs = yaml.safe_load(f.read())
    State.config = confs
    State.alarms = AlarmDFA()
    State.options = {}
    State.userconf = {}
    config.refresh()
def index():
    """Render the Slack-URL settings page; on POST, persist the submitted URL first.

    Both GET and POST end by re-reading the stored URL via config.refresh()
    and rendering the template with it.  Other methods fall through (None),
    as in the original two-branch version.
    """
    if request.method == 'POST':
        # capture the user input provided in the flask template
        urlupdate = request.form['slack_url']
        # update the TinyDB instance with the user provided Slack Application URL
        config.db.update({'slackurl': urlupdate}, where('id') == 1)
    if request.method in ('GET', 'POST'):
        # use the refresh function in the config module to display the new Slack Application URL
        slack_url = config.refresh()
        return render_template('index.html', slack_url=slack_url)
def index():
    """Handle the DTR 'tag deleted' webhook and notify a Slack channel.

    Expects a POST with the DTR event JSON body; posts a summary message to
    the configured Slack webhook URL and returns 204-style ('', 200).
    Any other method is rejected with 400.
    """
    if request.method == 'POST':
        # capture the json data sent by the DTR webhook and dumps the values for each key as a string
        dtr_data = request.json
        # NOTE(review): most of these locals are unused, but each dict access
        # validates that the key exists in the payload (raises KeyError /
        # 500 on a malformed event) — confirm before removing any.
        event_type = json.dumps(dtr_data["type"])
        event_createdAt = json.dumps(dtr_data["createdAt"])
        contents_namespace = json.dumps(dtr_data["contents"]["namespace"])
        contents_repository = json.dumps(dtr_data["contents"]["repository"])
        contents_tag = json.dumps(dtr_data["contents"]["tag"])
        contents_digest = json.dumps(dtr_data["contents"]["digest"])
        contents_imageName = json.dumps(dtr_data["contents"]["imageName"])
        contents_os = json.dumps(dtr_data["contents"]["os"])
        contents_architecture = json.dumps(
            dtr_data["contents"]["architecture"])
        contents_author = json.dumps(dtr_data["contents"]["author"])
        contents_deletedAt = json.dumps(dtr_data["contents"]["deletedAt"])
        event_location = json.dumps(dtr_data["location"])
        # format the text message that will be sent to the Slack channel
        # .strip('"') removes the quotes json.dumps adds around plain strings;
        # NOTE(review): contents_imageName is not stripped — presumably an
        # oversight, so the message shows it quoted; verify intended output.
        slack_data = {
            "text": "User " + contents_author.strip('"') + " deleted tag "
            + contents_imageName + " at " + contents_deletedAt.strip('"')
        }
        # config.refresh() returns the currently stored Slack webhook URL.
        slack_url = config.refresh()
        response = requests.post(slack_url, data=json.dumps(slack_data),
                                 headers={'Content-Type': 'application/json'})
        return '', 200
    else:
        abort(400)
def index():
    """Handle the DTR image-promotion webhook and notify a Slack channel.

    Expects a POST with the DTR event JSON body; posts a promotion summary
    to the configured Slack webhook URL and returns ('', 200).  Any other
    method is rejected with 400.
    """
    if request.method == 'POST':
        # capture the json data sent by the DTR webhook and dumps the values for each key as a string
        dtr_data = request.json
        # NOTE(review): several of these locals are unused, but each dict
        # access validates the payload shape (KeyError on a malformed event)
        # — confirm before removing any.
        event_type = json.dumps(dtr_data["type"])
        event_createdAt = json.dumps(dtr_data["createdAt"])
        contents_promotionPolicyID = json.dumps(
            dtr_data["contents"]["promotionPolicyID"])
        contents_digest = json.dumps(dtr_data["contents"]["digest"])
        contents_sourceRepository = json.dumps(
            dtr_data["contents"]["sourceRepository"])
        contents_sourceTag = json.dumps(dtr_data["contents"]["sourceTag"])
        contents_targetRepository = json.dumps(
            dtr_data["contents"]["targetRepository"])
        contents_targetTag = json.dumps(dtr_data["contents"]["targetTag"])
        contents_promotedAt = json.dumps(dtr_data["contents"]["promotedAt"])
        event_location = json.dumps(dtr_data["location"])
        # format the text message that will be sent to the Slack channel
        # .strip('"') removes the quotes json.dumps adds around plain strings;
        # only promotedAt is stripped here, so repo/tag fields appear quoted.
        slack_data = {
            "text": "Promoted image " + contents_sourceRepository + ":"
            + contents_sourceTag + " to " + contents_targetRepository
            + " with tag " + contents_targetTag + " at "
            + contents_promotedAt.strip('"')
        }
        # config.refresh() returns the currently stored Slack webhook URL.
        slack_url = config.refresh()
        response = requests.post(slack_url, data=json.dumps(slack_data),
                                 headers={'Content-Type': 'application/json'})
        return '', 200
    else:
        abort(400)
def testConfig(self, mock_config, mock_time):
    """Exercise config caching: cache expiry, incarnation bumps, and refresh().

    *mock_time* stubs the clock and *mock_config* stubs the raw config
    loader; call_count tracks how often the loader was consulted.
    """
    mock_time.return_value = 1234
    # yaml.safe_load instead of yaml.load: the bare form needs an explicit
    # Loader on PyYAML >= 6 and is unsafe; safe_load suffices for plain data.
    mock_config.return_value = yaml.safe_load("""
ipplan: /etc/ipplan.db
domain: event
snmp:
  access:
    version: 2
    community: REMOVED
    port: 161
""")

    # First access should load the cache but only then
    self.assertEqual(config.get('ipplan'), '/etc/ipplan.db')
    self.assertEqual(config.get('snmp', 'access', 'version'), 2)
    self.assertEqual(config.incarnation(), 1)
    self.assertEqual(mock_config.call_count, 1)

    # Advance the clock to have the cache refresh
    mock_time.return_value = 1235 + config.CONFIG_CACHE
    self.assertEqual(config.get('snmp', 'access', 'version'), 2)
    self.assertEqual(config.incarnation(), 1)
    self.assertEqual(mock_config.call_count, 2)

    # Try a different config
    mock_config.return_value = yaml.safe_load("""
snmp:
  access:
    version: 3
""")

    # See so the config updated and that we got a new incarnation number
    mock_time.return_value = 1236 + config.CONFIG_CACHE*2
    self.assertEqual(config.get('snmp', 'access', 'version'), 3)
    self.assertEqual(config.incarnation(), 2)
    self.assertEqual(mock_config.call_count, 3)

    # Try to refresh the config
    self.assertEqual(config.get('snmp', 'access', 'version'), 3)
    config.refresh()
    self.assertEqual(config.get('snmp', 'access', 'version'), 3)
    # We should keep the incarnation
    self.assertEqual(config.incarnation(), 2)
    self.assertEqual(mock_config.call_count, 4)
def start():
    """Parse CLI options, load config, and launch the alarm service loops."""
    parser = argparse.ArgumentParser('satori-alarm')
    parser.add_argument('--config', help='Config file')
    # Fixed copy-pasted help text: this flag sets the log level, not a file.
    parser.add_argument('--log', type=str, default="INFO", help='Log level')
    options = parser.parse_args()
    State.options = options
    # yaml.safe_load instead of yaml.load (needs a Loader on PyYAML >= 6 and
    # is unsafe); context manager closes the config file deterministically.
    with open(options.config) as f:
        State.config = yaml.safe_load(f.read())
    init_logging(options.log)
    config.refresh()
    config.start_watch()
    State.alarms = main.AlarmDFA()
    save.load()
    save.start_periodically_dump()
    spawn_autorestart(main.process_events)
    spawn_autorestart(main.alarm_tick)
    # Imported late: restapi presumably needs State/config initialized first
    # at import time — TODO confirm before hoisting to module level.
    import restapi
    restapi.serve()
def index():
    """Handle the DTR image-scan-completed webhook and notify a Slack channel.

    Expects a POST with the DTR scan event JSON body; posts a vulnerability
    summary to the configured Slack webhook URL and returns ('', 200).
    Any other method is rejected with 400.
    """
    if request.method == 'POST':
        # capture the json data sent by the DTR webhook and dumps the values for each key as a string
        dtr_data = request.json
        # NOTE(review): most of these locals are unused, but each dict access
        # validates the payload shape (KeyError on a malformed event) —
        # confirm before removing any.
        event_type = json.dumps(dtr_data["type"])
        event_createdAt = json.dumps(dtr_data["createdAt"])
        contents_namespace = json.dumps(dtr_data["contents"]["namespace"])
        contents_repository = json.dumps(dtr_data["contents"]["repository"])
        contents_tag = json.dumps(dtr_data["contents"]["tag"])
        contents_imageName = json.dumps(dtr_data["contents"]["imageName"])
        summary_namespace = json.dumps(
            dtr_data["contents"]["scanSummary"]["namespace"])
        summary_reponame = json.dumps(
            dtr_data["contents"]["scanSummary"]["reponame"])
        summary_tag = json.dumps(dtr_data["contents"]["scanSummary"]["tag"])
        summary_critical = json.dumps(
            dtr_data["contents"]["scanSummary"]["critical"])
        summary_major = json.dumps(
            dtr_data["contents"]["scanSummary"]["major"])
        summary_minor = json.dumps(
            dtr_data["contents"]["scanSummary"]["minor"])
        summary_lastscanstatus = json.dumps(
            dtr_data["contents"]["scanSummary"]["last_scan_status"])
        summary_checkcompletedat = json.dumps(
            dtr_data["contents"]["scanSummary"]["check_completed_at"])
        summary_shouldrescan = json.dumps(
            dtr_data["contents"]["scanSummary"]["should_rescan"])
        summary_hasforeignlayers = json.dumps(
            dtr_data["contents"]["scanSummary"]["has_foreign_layers"])
        event_location = json.dumps(dtr_data["location"])
        # format the text message that will be sent to the Slack channel
        # .strip('"') removes the quotes json.dumps adds around plain strings;
        # the warning counts are numbers, so dumps leaves them unquoted.
        slack_data = {
            "text": "Image scan completed for " + contents_imageName + " at "
            + summary_checkcompletedat.strip('"') + " with "
            + summary_critical + " critical warnings, " + summary_major
            + " major warnings, and " + summary_minor + " minor warnings."
        }
        # config.refresh() returns the currently stored Slack webhook URL.
        slack_url = config.refresh()
        response = requests.post(slack_url, data=json.dumps(slack_data),
                                 headers={'Content-Type': 'application/json'})
        return '', 200
    else:
        abort(400)
def setUp(self):
    """Build a fresh Annotator wired to a mock MIB resolver for each test."""
    resolver = MockMibResolver()
    self.mibresolver = resolver
    self.logic = annotator.Annotator()
    # Inject the mock so MIB lookups never touch real MIB data.
    self.logic._mibresolver = resolver
    self.run = actions.RunInformation()
    config.refresh()
def upload():
    """Build, package and stage all occam-pi .deb packages for upload.

    Refuses to run unless an Ubuntu version name (UVN) is configured.
    Rebuilds each platform package (AVR, native KROC, native TVM), the meta
    package and the occPlug package, then gathers the resulting .debs into
    TEMP_ROOT/PACKAGES/binary and generates a Packages.gz index with
    dpkg-scanpackages.  Order of the build/dependency/copy/deb calls is
    significant — each step reads config state left by the previous one.
    """
    header("UPLOADING FILES")
    print("VERSION: %s" % config.get('UVN'))
    if config.get('UVN') == 'NO_NAME':
        header("SET UBUNTU VERSION WITH --ubuntu-version")
        sys.exit()
    with pushd():
        root = config.get('TEMP_ROOT')
        # Remove old packages
        with pushd():
            cd(root)
            remove_dir('PACKAGES')
            cd(root)
            mkdir('PACKAGES/binary')
        # Stamp this run's version from the current UTC time.
        config.rebase('VERSION', getUTC())
        config.refresh()
        header("SHIPPING VERSION: %s" % config.get('VERSION'))
        header("PACKAGING AVR")
        build_avr()
        build()
        dependencies()
        copy_arduino_config()
        copy_arduino_build()
        deb()
        header("PACKAGING NATIVE KROC")
        build_native_kroc()
        dependencies()
        copy_native_build()
        deb()
        header("PACKAGING NATIVE TVM")
        build_native_tvm()
        dependencies()
        copy_native_tvm_build()
        deb()
        header("PACKAGING META")
        meta_deb()
        # build_occplug() forces BUILD_ARCHITECTURE to 'all'; save and
        # restore the real architecture around it.
        arch = config.get('BUILD_ARCHITECTURE')
        header("PACKAGING OCCPLUG")
        build_occplug()
        occplug_deb()
        config.rebase('BUILD_ARCHITECTURE', arch)
        # Expected .deb filenames produced by the steps above.
        META = concat(['occam-pi', '_', config.get('VERSION'), '_',
                       config.get('BUILD_ARCHITECTURE'), '.deb'])
        KROC = concat([config.get('STEM'), '-kroc-posix-posix', '_',
                       config.get('VERSION'), '_',
                       config.get('BUILD_ARCHITECTURE'), '.deb'])
        TVM = concat([config.get('STEM'), '-tvm-posix-posix', '_',
                      config.get('VERSION'), '_',
                      config.get('BUILD_ARCHITECTURE'), '.deb'])
        AVR = concat([config.get('STEM'), '-tvm-avr-arduino', '_',
                      config.get('VERSION'), '_',
                      config.get('BUILD_ARCHITECTURE'), '.deb'])
        PLUG = concat([config.get('STEM'), '-occplug', '_',
                       config.get('VERSION'), '_', 'all', '.deb'])
        # Gather every package into the shared PACKAGES/binary staging dir.
        with pushd():
            cd(root)
            copy_files(META, '.', 'PACKAGES/binary')
        with pushd():
            cd(root + '/kroc-posix-posix')
            copy_files(KROC, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/tvm-posix-posix')
            copy_files(TVM, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/tvm-avr-arduino')
            copy_files(AVR, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/occPlug')
            copy_files(PLUG, '.', '../PACKAGES/binary')
        # Generate the apt repository index for the staged packages.
        with pushd():
            cd(root + '/PACKAGES')
            cmd(build_command(['dpkg-scanpackages', 'binary',
                               '/dev/null', '|', 'gzip', '-9c', '>',
                               'binary/Packages.gz']))
        with pushd():
            cd(root + '/PACKAGES/binary')
def checkout_to(url, path):
    """Check out the Subversion tree at *url* into *path*."""
    header("RUNNING CHECKOUT TO...")
    config.refresh()
    svn_args = ["svn", "co", url, path]
    cmd(build_command(svn_args))
def with_temp_dir(path):
    """Rebase TEMP_ROOT to *path*/<current build architecture>."""
    config.refresh()
    new_root = '%s/%s' % (path, config.get('BUILD_ARCHITECTURE'))
    header('SETTING TEMP DIR TO %s' % new_root)
    config.rebase('TEMP_ROOT', new_root)
def upload():
    """Build, package and stage all occam-pi .deb packages for upload.

    Refuses to run unless an Ubuntu version name (UVN) is configured.
    Rebuilds each platform package (AVR, native KROC, native TVM), the meta
    package and the occPlug package, then gathers the resulting .debs into
    TEMP_ROOT/PACKAGES/binary and generates a Packages.gz index with
    dpkg-scanpackages.  Order of the build/dependency/copy/deb calls is
    significant — each step reads config state left by the previous one.
    """
    header("UPLOADING FILES")
    print("VERSION: %s" % config.get('UVN'))
    if config.get('UVN') == 'NO_NAME':
        header("SET UBUNTU VERSION WITH --ubuntu-version")
        sys.exit()
    with pushd():
        root = config.get('TEMP_ROOT')
        # Remove old packages
        with pushd():
            cd(root)
            remove_dir('PACKAGES')
            cd(root)
            mkdir('PACKAGES/binary')
        # Stamp this run's version from the current UTC time.
        config.rebase('VERSION', getUTC())
        config.refresh()
        header("SHIPPING VERSION: %s" % config.get('VERSION'))
        header("PACKAGING AVR")
        build_avr()
        build()
        dependencies()
        copy_arduino_config()
        copy_arduino_build()
        deb()
        header("PACKAGING NATIVE KROC")
        build_native_kroc()
        dependencies()
        copy_native_build()
        deb()
        header("PACKAGING NATIVE TVM")
        build_native_tvm()
        dependencies()
        copy_native_tvm_build()
        deb()
        header("PACKAGING META")
        meta_deb()
        # build_occplug() forces BUILD_ARCHITECTURE to 'all'; save and
        # restore the real architecture around it.
        arch = config.get('BUILD_ARCHITECTURE')
        header("PACKAGING OCCPLUG")
        build_occplug()
        occplug_deb()
        config.rebase('BUILD_ARCHITECTURE', arch)
        # Expected .deb filenames produced by the steps above.
        META = concat([
            'occam-pi', '_', config.get('VERSION'), '_',
            config.get('BUILD_ARCHITECTURE'), '.deb'
        ])
        KROC = concat([
            config.get('STEM'), '-kroc-posix-posix', '_',
            config.get('VERSION'), '_',
            config.get('BUILD_ARCHITECTURE'), '.deb'
        ])
        TVM = concat([
            config.get('STEM'), '-tvm-posix-posix', '_',
            config.get('VERSION'), '_',
            config.get('BUILD_ARCHITECTURE'), '.deb'
        ])
        AVR = concat([
            config.get('STEM'), '-tvm-avr-arduino', '_',
            config.get('VERSION'), '_',
            config.get('BUILD_ARCHITECTURE'), '.deb'
        ])
        PLUG = concat([
            config.get('STEM'), '-occplug', '_',
            config.get('VERSION'), '_', 'all', '.deb'
        ])
        # Gather every package into the shared PACKAGES/binary staging dir.
        with pushd():
            cd(root)
            copy_files(META, '.', 'PACKAGES/binary')
        with pushd():
            cd(root + '/kroc-posix-posix')
            copy_files(KROC, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/tvm-posix-posix')
            copy_files(TVM, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/tvm-avr-arduino')
            copy_files(AVR, '.', '../PACKAGES/binary')
        with pushd():
            cd(root + '/occPlug')
            copy_files(PLUG, '.', '../PACKAGES/binary')
        # Generate the apt repository index for the staged packages.
        with pushd():
            cd(root + '/PACKAGES')
            cmd(build_command([
                'dpkg-scanpackages', 'binary', '/dev/null', '|',
                'gzip', '-9c', '>', 'binary/Packages.gz'
            ]))
        with pushd():
            cd(root + '/PACKAGES/binary')