def train_algorithm(request):
    module_id = request.GET.get('module_id')
    scene_id = request.GET.get('scene_id')
    limit = request.GET.get('limit')
    if scene_id and module_id:
        tmp = utils.get_scene_record(module_id, scene_id)
        for i in tmp:
            i['data_length'] = range(len(i['data'][i['data'].keys()[0]]))
            i['resources'] = []
            i['apis'] = []
            i['api_info'] = []
            api_dict = {}
            for k in i['data'].keys():
                if k != 'total' and k.find('#api#') != 0:
                    i['resources'].append(k)
                if k != 'total' and k.find('#api#') == 0:
                    api_dict[k[5:]] = i['data'][k]
                    # this_api_id = utils.get_api_by_name(k[5:])
                    i['api_info'].append(k)  # TODO
            for j in i['data_length']:
                current_api_dict = {}
                for k, v in api_dict.iteritems():
                    current_api_dict[k] = v[j]
                i['apis'].append(current_api_dict)

        if limit and int(limit) > 0:
            ret = {'scene_records': tmp[:int(limit)]}
        else:
            ret = {'scene_records': tmp}
        ret['module_id'] = module_id
        ret['scene_id'] = scene_id

        scene_api = utils.get_scene_api(module_id, scene_id)
        for s in scene_api:
            s['api_info'] = utils.get_api(s.get('api_id'))
            # get thresholds
            if s['api_info']:
                s['api_info']['threholds'] = utils.get_api_resource(s.get('api_id'))
                for th in s['api_info']['threholds'].get('resource_list'):
                    th['name'] = utils.get_resource(th.get('resource_id')).get('name')

        ret['scene_info'] = utils.get_scene(scene_id)
        ret['module_info'] = utils.get_module(module_id)
        ret['scene_api'] = scene_api
        ret['all_resource'] = []
        all_resource_ids = []

        # get all resources needed
        for s in scene_api:
            for id in s.get('api_info').get('threholds').get('resource_id'):
                if not id in all_resource_ids:
                    all_resource_ids.append(id)
                    ret['all_resource'].append(utils.get_resource(id))

        ret["public"] = utils.get_public(request)
        return render(request, 'assess/train_algorithm.html', {'data': ret})
    else:
        return render(request, 'error.html')

def load_sound(self):
    self.intro_sound = pygame.mixer.Sound(
        utils.get_resource("334261__projectsu012__coin-chime.wav")
    )
    self.typing_sound = pygame.mixer.Sound(
        utils.get_resource("194799__jim-ph__keyboard5.wav")
    )
    self.background_music = pygame.mixer.music.load(
        utils.get_resource("Synthwave7.wav")
    )

def load_ui(self):
    self.font = pygame.font.Font(
        utils.get_resource("ZX-Spectrum/zxspectr.ttf"), 24)
    self.audio_icon = pygame.image.load(
        utils.get_resource("500px-Speaker_Icon.svg.png"))
    self.audio_icon = pygame.transform.scale(
        self.audio_icon,
        (constants.AUDIO_ICON_WIDTH, constants.AUDIO_ICON_HEIGHT))
    self.audio_icon_rect = self.audio_icon.get_rect()
    self.audio_icon_rect.x = constants.AUDIO_ICON_X
    self.audio_icon_rect.y = constants.AUDIO_ICON_Y

def get_driver_path(self):
    # Get architecture.
    self.get_architecture()

    if self.architecture == 'x64':
        # Select 64-bit driver.
        self.driver = get_resource(os.path.join('drivers', 'winpmem64.sys'))
    elif self.architecture == 'x86':
        # Select 32-bit driver.
        self.driver = get_resource(os.path.join('drivers', 'winpmem32.sys'))

    if self.driver and os.path.exists(self.driver):
        return True

    return False

async def test_agent_disconnect(resource_container, environment, server, client, clienthelper, async_finalizer, caplog):
    caplog.set_level(logging.INFO)
    config.Config.set("config", "server-timeout", "1")
    config.Config.set("config", "agent-reconnect-delay", "1")
    config.Config.set("config", "agent-deploy-interval", "0")
    config.Config.set("config", "agent-repair-interval", "0")

    version = await clienthelper.get_version()
    await clienthelper.put_version_simple([get_resource(version)], version)

    result = await client.release_version(environment, version, False)
    assert result.code == 200

    agent = await get_agent(server, environment, "agent1")
    async_finalizer.add(agent.stop)

    await asyncio.wait_for(server.stop(), timeout=15)

    def disconnected():
        return not agent._instances["agent1"]._enabled

    await retry_limited(disconnected, 1)

    i = log_index(caplog, "inmanta.agent.agent.agent1", logging.INFO, "Agent assuming primary role for agent1")
    i = log_index(caplog, "inmanta.agent.agent", logging.WARNING, "Connection to server lost, taking agents offline", i)
    log_index(caplog, "inmanta.agent.agent.agent1", logging.INFO, "Agent agent1 stopped because Connection to server lost", i)

def scan(service_path, profile_name, queue_results):
    # Find Yara signatures; if the file is not available, we need to terminate.
    yara_path = os.path.join(os.getcwd(), 'signatures.yar')
    if not os.path.exists(yara_path):
        yara_path = get_resource(os.path.join('rules', 'signatures.yar'))
        if not os.path.exists(yara_path):
            raise DetectorError("Unable to find a valid Yara signatures file!")

    log.info("Selected Yara signature file at %s", yara_path)

    # Retrieve address space.
    space = get_address_space(service_path, profile_name, yara_path)
    if space is None:
        log.info("Cannot generate address space")
    else:
        log.info("Address space: {0}, Base: {1}".format(space, space.base))
        log.info("Profile: {0}, DTB: {1:#x}".format(space.profile, space.dtb))

    rules = yara.compile(yara_path)

    log.info("Starting yara scanner...")

    matched = []
    for process in tasks.pslist(space):
        # Skip ourselves.
        if process.UniqueProcessId == os.getpid():
            continue

        try:
            process_name = process.ImageFileName
        except:
            process_name = ''

        try:
            try:
                log.debug("Scanning process %s, pid: %d, ppid: %d, exe: %s, cmdline: %s",
                          process_name, process.UniqueProcessId,
                          process.InheritedFromUniqueProcessId,
                          process.ImagePathName, process.CommandLine)
            except:
                log.debug("Scanning process %s, pid: %d",
                          process_name, process.UniqueProcessId)

            for hit in rules.match(pid=process.UniqueProcessId):
                log.warning("Process %s (pid: %d) matched: %s, Values:",
                            process_name, process.UniqueProcessId, hit.rule)

                for entry in hit.strings:
                    log.warning("\t%d, %s, %s", entry[0], entry[1], entry[2])

                # We only store unique results; it's pointless to store results
                # for the same rule.
                if not hit.rule in matched:
                    # Add rule to the list of unique matches.
                    matched.append(hit.rule)

                    # Add match to the list of results.
                    queue_results.put(dict(
                        rule=hit.rule,
                        detection=hit.meta.get('detection'),
                    ))
        except Exception as e:
            log.debug("Unable to scan process: %s", e)

def assert_mom_ee(version, security_mode='permissive'):
    ensure_prerequisites_installed()
    ensure_service_account()
    ensure_permissions()
    ensure_secret(strict=True if security_mode == 'strict' else False)
    ensure_docker_credentials()

    # Deploy MoM-EE in the requested security mode.
    app_def_file = '{}/mom-ee-{}-{}.json'.format(fixtures.fixtures_dir(), security_mode, version)
    assert os.path.isfile(app_def_file), \
        "Couldn't find appropriate MoM-EE definition: {}".format(app_def_file)

    image = mom_ee_image(version)
    print('Deploying {} definition with {} image'.format(app_def_file, image))

    app_def = get_resource(app_def_file)
    app_def['container']['docker']['image'] = 'mesosphere/marathon-dcos-ee:{}'.format(image)

    client = marathon.create_client()
    client.add_app(app_def)
    shakedown.deployment_wait()
    shakedown.wait_for_service_endpoint(mom_ee_endpoint(version, security_mode))

def assert_mom_ee(version, security_mode='permissive'):
    ensure_service_account()
    ensure_permissions()
    ensure_sa_secret(strict=True if security_mode == 'strict' else False)
    ensure_docker_config_secret()

    # In strict mode all tasks are started as user `nobody` by default. However we start
    # MoM-EE as 'root' and for that we need to give root marathon ACLs to start
    # tasks as 'root'.
    if security_mode == 'strict':
        common.add_dcos_marathon_user_acls()

    # Deploy MoM-EE in the requested security mode.
    app_def_file = '{}/mom-ee-{}-{}.json'.format(fixtures.fixtures_dir(), security_mode, version)
    assert os.path.isfile(app_def_file), \
        "Couldn't find appropriate MoM-EE definition: {}".format(app_def_file)

    image = mom_ee_image(version)
    logger.info('Deploying {} definition with {} image'.format(app_def_file, image))

    app_def = get_resource(app_def_file)
    app_def['container']['docker']['image'] = 'mesosphere/marathon-dcos-ee:{}'.format(image)
    app_id = app_def["id"]

    client = marathon.create_client()
    client.add_app(app_def)
    deployment_wait(service_id=app_id)
    shakedown.dcos.service.wait_for_service_endpoint(mom_ee_endpoint(version, security_mode), path="ping")

def assert_mom_ee(version, security_mode='permissive'):
    ensure_prerequisites_installed()
    ensure_service_account()
    ensure_permissions()
    ensure_sa_secret(strict=True if security_mode == 'strict' else False)
    ensure_docker_config_secret()

    # In strict mode all tasks are started as user `nobody` by default. However we start
    # MoM-EE as 'root' and for that we need to give root marathon ACLs to start
    # tasks as 'root'.
    if security_mode == 'strict':
        common.add_dcos_marathon_user_acls()

    # Deploy MoM-EE in the requested security mode.
    app_def_file = '{}/mom-ee-{}-{}.json'.format(fixtures.fixtures_dir(), security_mode, version)
    assert os.path.isfile(app_def_file), \
        "Couldn't find appropriate MoM-EE definition: {}".format(app_def_file)

    image = mom_ee_image(version)
    print('Deploying {} definition with {} image'.format(app_def_file, image))

    app_def = get_resource(app_def_file)
    app_def['container']['docker']['image'] = 'mesosphere/marathon-dcos-ee:{}'.format(image)
    app_id = app_def["id"]

    client = marathon.create_client()
    client.add_app(app_def)
    common.deployment_wait(service_id=app_id)
    common.wait_for_service_endpoint(mom_ee_endpoint(version, security_mode), path="ping")

def put(self, id):
    r = get_resource(data, id)
    args = self.reqparse.parse_args()
    for k, v in args.items():
        if v is not None:
            r[k] = v
    return {key: marshal(r, fields)}

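# A minimal sketch of the get_resource(data, id) lookup helper the put() handler above
# relies on. The helper is not shown in this collection, so the body below is an
# assumption modeled on the common Flask-RESTful pattern (abort with 404 when the id
# is unknown), not the project's actual implementation.
from flask_restful import abort

def get_resource(data, id):
    # Assumes `data` is a list of dicts that each carry an 'id' field.
    for item in data:
        if item.get('id') == id:
            return item
    abort(404, message="Resource {} not found".format(id))
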
def _send_get_request(self, end_point):
    if self._auth_token is None:
        response = utils.get_resource(self._api_url + end_point)
    else:
        response = ua.get_protected_resource(
            endpoint=self._api_url + end_point,
            token=self._auth_token)
    return response

def scan(service_path, profile_name, queue_results):
    # Find Yara signatures; if the file is not available, we need to terminate.
    yara_path = os.path.join(os.getcwd(), 'signatures.yar')
    if not os.path.exists(yara_path):
        yara_path = get_resource(os.path.join('rules', 'signatures.yar'))
        if not os.path.exists(yara_path):
            raise DetectorError("Unable to find a valid Yara signatures file!")

    log.info("Selected Yara signature file at %s", yara_path)

    # Retrieve address space.
    space = get_address_space(service_path, profile_name, yara_path)
    if space is None:
        log.info("Cannot generate address space")
    else:
        log.info("Address space: {0}, Base: {1}".format(space, space.base))
        log.info("Profile: {0}, DTB: {1:#x}".format(space.profile, space.dtb))

    # Initialize Volatility's YaraScan module.
    yara = malfind.YaraScan(space.get_config())

    log.info("Starting yara scanner...")

    matched = []
    for o, address, hit, value in yara.calculate():
        if not o:
            owner = 'Unknown Kernel Memory'
        elif o.obj_name == '_EPROCESS':
            # If the PID is of the current process, it's a false positive.
            # It just detected the Yara signatures in memory. Skip.
            if int(o.UniqueProcessId) == int(os.getpid()):
                continue
            # Skip also if it's a child process.
            if int(o.InheritedFromUniqueProcessId) == int(os.getpid()):
                continue

            owner = 'Process {0} (pid: {1})'.format(o.ImageFileName, o.UniqueProcessId)
        else:
            owner = '{0}'.format(o.BaseDllName)

        # Extract a hexdump of the memory chunk that matched the signature.
        rule_data = ''
        for offset, hexdata, translated_data in utils.Hexdump(value):
            rule_data += '{0} {1}\n'.format(hexdata, ''.join(translated_data))

        log.warning("%s matched: %s at address: 0x%X, Value:\n\n%s",
                    owner, hit.rule, address, rule_data)

        if not hit.rule in matched:
            # Add the rule to the list of matched rules, so we don't have
            # useless repetitions.
            matched.append(hit.rule)

            queue_results.put(dict(
                rule=hit.rule,
                detection=hit.meta.get('detection')
            ))

def scan(queue_results):
    # Find Yara signatures, if file is not available, we need to terminate.
    yara_path = os.path.join(os.getcwd(), 'signatures.yar')
    if not os.path.exists(yara_path):
        yara_path = get_resource(os.path.join('rules', 'signatures.yar'))
        if not os.path.exists(yara_path):
            raise DetectorError("Unable to find a valid Yara signatures file!")

    log.info("Selected Yara signature file at %s", yara_path)

    rules = yara.compile(yara_path)

    matched = []
    for process in psutil.process_iter():
        # Skip ourselves.
        if process.pid == os.getpid():
            continue

        try:
            process_name = process.name()
        except:
            process_name = ''

        # If there is a process name, let's match it against the whitelist
        # and skip if there is a match.
        # TODO: this is hacky, need to find a better solution to false positives
        # especially with security software.
        if process_name:
            if process_name.lower() in process_whitelist:
                continue

        try:
            try:
                log.debug("Scanning process %s, pid: %d, ppid: %d, exe: %s, cmdline: %s",
                          process_name, process.pid, process.ppid(),
                          process.exe(), process.cmdline())
            except:
                log.debug("Scanning process %s, pid: %d", process_name, process.pid)

            for hit in rules.match(pid=process.pid):
                log.warning("Process %s (pid: %d) matched: %s, Values:",
                            process_name, process.pid, hit.rule)

                for entry in hit.strings:
                    log.warning("\t%d, %s, %s", entry[0], entry[1], entry[2])

                # We only store unique results, it's pointless to store results
                # for the same rule.
                if not hit.rule in matched:
                    # Add rule to the list of unique matches.
                    matched.append(hit.rule)

                    # Add match to the list of results.
                    queue_results.put(dict(
                        rule=hit.rule,
                        detection=hit.meta.get('detection'),
                    ))
        except Exception as e:
            log.debug("Unable to scan process: %s", e)

def api_view(request):
    data = {}
    data["public"] = utils.get_public(request)
    data["resource"] = utils.get_resource()
    id = request.GET.get("id")
    if id:
        data["api"] = utils.get_api(id)
        data["resource_list"] = utils.get_api_resource(id)
    return render(request, 'assess/api.html', {"data": data})

def prefetch_docker_images_on_all_nodes():
    agents = get_private_agents()
    data = get_resource("pod-2-containers.json")
    data['constraints'] = unique_host_constraint()
    data['scaling']['instances'] = len(agents)
    client = marathon.create_client()
    client.add_pod(data)
    time_deployment("undeploy")
    delete_all_pods()

def __init__(self, prefix, token, cwd):
    Client.__init__(self)
    self.prefix = prefix
    self.token = token
    self.cwd = cwd
    self.plugin_manager = PluginManager(self, '%s/plugins' % self.cwd)
    self.plugin_manager.load_plugins()
    user_agent = get_resource(self.cwd, 'user_agent')
    self.client_session = ClientSession(headers={'User-Agent': user_agent})

def __init__(self):
    fn = get_resource('data', 'security-assistant.ui')
    builder = Gtk.Builder()
    builder.add_from_file(fn)
    window = builder.get_object("ui")
    window.show_all()
    window.connect("destroy", Gtk.main_quit)
    self._notebook = window.get_child()
    self._notebook.remove_page(self._notebook.get_current_page())
    self._add_intro_tab()

def load_app(app_def_file, app_id=None):
    """Loads an app definition from a json file and sets the app id."""
    app_path = os.path.join(apps_dir(), "{}.json".format(app_def_file))
    app = get_resource(app_path)

    if app_id is None:
        app['id'] = make_id(app_def_file)
    else:
        app['id'] = app_id

    return app

def load_app(app_def_file, app_id=None, parent_group="/"):
    """Loads an app definition from a json file and sets the app id."""
    app_path = os.path.join(apps_dir(), "{}.json".format(app_def_file))
    app = get_resource(app_path)

    if app_id is None:
        app['id'] = make_id(app_def_file, parent_group)
    else:
        app['id'] = join(parent_group, app_id)

    logger.info('Loaded an app definition with id={}'.format(app['id']))
    return app

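# A short usage sketch for load_app() above: load a fixture definition, deploy it with
# the Marathon client, and wait for the deployment to settle. The fixture name
# "sleep-app" and this wrapper function are illustrative assumptions; the client and
# deployment helpers mirror the ones used elsewhere in this collection.
def deploy_sleep_app_example():
    app_def = load_app("sleep-app", parent_group="/examples")
    client = marathon.create_client()
    client.add_app(app_def)
    deployment_wait(service_id=app_def["id"])
    return app_def["id"]
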
def load_ui(self):
    self.font = pygame.font.Font(utils.get_resource("ZX-Spectrum/zxspectr.ttf"), 36)
    self.title = self.font.render("PONG", True, constants.BUTTON_COLOR)
    self.plus_button = self.font.render("Enter game", True, constants.BUTTON_COLOR)
    self.type_prompt = self.font.render(
        f"> {self.game_name}", True, constants.BUTTON_COLOR
    )
    self.plus_button_rect = self.plus_button.get_rect()
    self.title_rect = self.title.get_rect()
    self.audio_icon = pygame.image.load(
        utils.get_resource("500px-Speaker_Icon.svg.png")
    )
    self.audio_icon = pygame.transform.scale(
        self.audio_icon, (constants.AUDIO_ICON_WIDTH, constants.AUDIO_ICON_HEIGHT)
    )
    self.audio_icon_rect = self.audio_icon.get_rect()
    self.audio_icon_rect.x = constants.AUDIO_ICON_X
    self.audio_icon_rect.y = constants.AUDIO_ICON_Y

def test_default_user():
    """Ensures a task that does not specify a user is started as root by default."""

    # launch unique-sleep
    application_json = get_resource("{}/unique-sleep.json".format(fixture_dir()))
    client = marathon.create_client()
    client.add_app(application_json)
    app = client.get_app(application_json['id'])
    assert app['user'] is None

    # wait for deployment to finish
    tasks = client.get_tasks("unique-sleep")
    host = tasks[0]['host']

    assert run_command_on_agent(host, "ps aux | grep '[s]leep ' | awk '{if ($1 !=\"root\") exit 1;}'")

def test_mom_with_network_failure_bounce_master():
    """Marathon on Marathon (MoM) tests for DC/OS with network failures
    simulated by knocking out ports.
    """
    # get MoM ip
    mom_ip = ip_of_mom()
    print("MoM IP: {}".format(mom_ip))

    app_def = get_resource("{}/large-sleep.json".format(fixture_dir()))

    with shakedown.marathon_on_marathon():
        client = marathon.create_client()
        client.add_app(app_def)
        shakedown.wait_for_task("marathon-user", "sleep")
        tasks = client.get_tasks('sleep')
        original_sleep_task_id = tasks[0]["id"]
        task_ip = tasks[0]['host']
        print("\nTask IP: " + task_ip)

    # PR for network partitioning in shakedown makes this better
    # take out the net
    partition_agent(mom_ip)
    partition_agent(task_ip)

    # wait for a min
    time.sleep(timedelta(minutes=1).total_seconds())

    # bounce master
    shakedown.run_command_on_master("sudo systemctl restart dcos-mesos-master")

    # bring the net up
    reconnect_agent(mom_ip)
    reconnect_agent(task_ip)

    time.sleep(timedelta(minutes=1).total_seconds())
    shakedown.wait_for_service_endpoint('marathon-user', timedelta(minutes=10).total_seconds())

    with shakedown.marathon_on_marathon():
        client = marathon.create_client()
        shakedown.wait_for_task("marathon-user", "sleep", timedelta(minutes=10).total_seconds())
        tasks = client.get_tasks('sleep')
        current_sleep_task_id = tasks[0]["id"]

    assert current_sleep_task_id == original_sleep_task_id, "Task ID shouldn't change"

def test_mom_with_network_failure_bounce_master():
    """Marathon on Marathon (MoM) tests for DC/OS with network failures
    simulated by knocking out ports.
    """
    # get MoM ip
    mom_ip = ip_of_mom()
    print("MoM IP: {}".format(mom_ip))

    app_def = get_resource("{}/large-sleep.json".format(fixture_dir()))

    with shakedown.marathon_on_marathon():
        client = marathon.create_client()
        client.add_app(app_def)
        shakedown.wait_for_task("marathon-user", "sleep")
        tasks = client.get_tasks('sleep')
        original_sleep_task_id = tasks[0]["id"]
        task_ip = tasks[0]['host']
        print("\nTask IP: " + task_ip)

    # PR for network partitioning in shakedown makes this better
    # take out the net
    partition_agent(mom_ip)
    partition_agent(task_ip)

    # wait for a min
    time.sleep(timedelta(minutes=1).total_seconds())

    # bounce master
    shakedown.run_command_on_master("sudo systemctl restart dcos-mesos-master")

    # bring the net up
    reconnect_agent(mom_ip)
    reconnect_agent(task_ip)

    time.sleep(timedelta(minutes=1).total_seconds())
    shakedown.wait_for_service_endpoint('marathon-user')
    shakedown.wait_for_task("marathon-user", "sleep")

    with shakedown.marathon_on_marathon():
        client = marathon.create_client()
        shakedown.wait_for_task("marathon-user", "sleep")
        tasks = client.get_tasks('sleep')
        current_sleep_task_id = tasks[0]["id"]

    assert current_sleep_task_id == original_sleep_task_id, "Task ID shouldn't change"

def single_api_view(request):
    ret = {}
    ret['public'] = utils.get_public(request)
    api_id = request.GET.get('api_id')
    records = utils.get_api_record(api_id)
    api_resource = utils.get_api_resource(api_id)
    ret['resource'] = []
    ret['records'] = []
    for i in api_resource['resource_id']:
        ret['resource'].append(utils.get_resource(i))
    ret['records'] = records
    for r in ret['records']:
        r['data'] = json.loads(r['data'])
    ret['api_id'] = api_id
    return render(request, 'assess/api_record.html', {'data': ret})

def test_default_user():
    """Ensures the default user of a task that is created is started as root."""

    # launch unique-sleep
    application_json = get_resource("{}/unique-sleep.json".format(fixture_dir()))
    client = marathon.create_client()
    client.add_app(application_json)
    app = client.get_app(application_json['id'])
    assert app['user'] is None

    # wait for deployment to finish
    tasks = client.get_tasks("unique-sleep")
    host = tasks[0]['host']

    assert run_command_on_agent(host, "ps aux | grep '[s]leep ' | awk '{if ($1 !=\"root\") exit 1;}'")

def get_formula_by_threshold(request):
    if request.method != "POST":
        return public.fail_result_http(u"POST only")
    body = json.loads(request.body)
    record_id = body.get('record_id')
    module_id = body.get('module_id')
    scene_id = body.get('scene_id')
    apis = utils.get_api_by_scene_module(module_id=module_id, scene_id=scene_id)
    all_formulas = utils.get_formula(record_id)
    all_y_for_threshold = {}
    for f in all_formulas:
        a = json.loads(f.get('formula')).get('a')
        b = json.loads(f.get('formula')).get('b')
        resource_id = f.get('resource_id')
        for api in apis:
            # get threshold
            this_threshold = utils.get_threshold_by_api_resource(
                api_id=api.get('id'), resource_id=resource_id)
            resource_name = utils.get_resource(resource_id).get('name')
            if all_y_for_threshold.get(resource_name):
                all_y_for_threshold[resource_name] = min(
                    all_y_for_threshold[resource_name],
                    float(this_threshold) * float(a) + float(b))
            else:
                all_y_for_threshold[resource_name] = float(this_threshold) * float(a) + float(b)

    min_y = None
    min_y_resource = None
    for k, v in all_y_for_threshold.iteritems():
        if min_y == None:
            min_y = all_y_for_threshold.get(k)
            min_y_resource = k
        else:
            if all_y_for_threshold.get(k) < min_y:
                min_y_resource = k
                min_y = all_y_for_threshold.get(k)

    return public.success_result_http({
        'resource_name': min_y_resource,
        'tps': min_y
    })

def test_default_user():
    """Ensures the default user of a task is started as root. This is the default user."""

    # launch unique-sleep
    application_json = get_resource("{}/unique-sleep.json".format(fixture_dir()))
    client = marathon.create_client()
    client.add_app(application_json)
    app = client.get_app(application_json['id'])
    assert app['user'] is None

    # wait for deployment to finish
    tasks = client.get_tasks("unique-sleep")
    host = tasks[0]['host']

    assert run_command_on_agent(host, "ps aux | grep '[s]leep ' | awk '{if ($1 !=\"root\") exit 1;}'")

    client = marathon.create_client()
    client.remove_app("/unique-sleep")

def test_mom_with_network_failure():
    """Marathon on Marathon (MoM) tests for DC/OS with network failures
    simulated by knocking out ports.
    """
    # get MoM ip
    mom_ip = ip_of_mom()
    print("MoM IP: {}".format(mom_ip))

    app_def = get_resource("{}/large-sleep.json".format(fixture_dir()))

    with marathon_on_marathon():
        client = marathon.create_client()
        client.add_app(app_def)
        shakedown.wait_for_task("marathon-user", "sleep")
        tasks = client.get_tasks('sleep')
        original_sleep_task_id = tasks[0]["id"]
        task_ip = tasks[0]['host']

    # PR for network partitioning in shakedown makes this better
    # take out the net
    partition_agent(mom_ip)
    partition_agent(task_ip)

    # wait for a min
    service_delay()

    # bring the net up
    reconnect_agent(mom_ip)
    reconnect_agent(task_ip)

    service_delay()
    shakedown.wait_for_service_endpoint(PACKAGE_APP_ID)
    shakedown.wait_for_task("marathon-user", "sleep")

    with marathon_on_marathon():
        client = marathon.create_client()
        shakedown.wait_for_task("marathon-user", "sleep")
        tasks = client.get_tasks('sleep')
        current_sleep_task_id = tasks[0]["id"]

    assert current_sleep_task_id == original_sleep_task_id, "Task ID shouldn't change"

def load_configuration_files():
    """Load configuration files"""
    path_glob = get_resource('checks', '*.yaml')
    log.debug("scanning %r", path_glob)
    fnames = sorted(glob(path_glob))
    log.debug("%d config files to be loaded", len(fnames))
    config_blocks = []
    for fn in fnames:
        with open(fn) as f:
            confs = yaml.load(f)
            if not isinstance(confs, list):
                confs = [confs, ]
            for c in confs:
                config_blocks.append(Check(c))

    log.debug("%d config blocks found", len(config_blocks))

    # create config hierarchy
    # extract root blocks
    config_tree = [b for b in config_blocks if not b.depends_on]
    child_blocks = [b for b in config_blocks if b.depends_on]
    while child_blocks:
        for pos, block in enumerate(child_blocks):
            parent = find_config_block(config_tree, block.depends_on)
            if parent is None:
                continue  # hopefully the parent will show up later

            try:
                parent.children.append(block)
            except KeyError:
                parent['children'] = [block, ]

            child_blocks.pop(pos)

    return config_tree

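# load_configuration_files() above calls find_config_block(), which is not included in
# this collection. The sketch below is one plausible implementation under the assumption
# that each Check block exposes a `name` attribute and an optional `children` list; it
# does a depth-first search for the block a child names in its `depends_on` field.
def find_config_block(blocks, name):
    for block in blocks:
        if getattr(block, 'name', None) == name:
            return block
        # Recurse into children that have already been attached to the tree.
        found = find_config_block(getattr(block, 'children', []), name)
        if found is not None:
            return found
    return None
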
def __init__(self, screen):
    self.screen = screen
    self.logo = pygame.image.load(utils.get_resource("pygame_powered.gif"))
    self.rect = self.logo.get_rect()
    self.x = constants.SCREEN_CENTER[0] - self.rect.width / 2
    self.y = constants.SCREEN_CENTER[1] - self.rect.height / 2
    self.game_name = ""
    self.load_sound()
    self.intro_sound.play(fade_ms=2000)
    self.load_ui()
    self.time = time.time()
    self.playing_music = True
    pygame.mixer.music.play(fade_ms=20000)

def provider_details(request, number):
    # if check_if_resource_exists(number) != 200:
    #     return render(request, 'providerregistry/404.html', {})
    provider = get_resource(number)
    basic = provider.get("basic", {})
    random_background = "%s.jpg" % (random.randrange(1, 27))

    # Get Gravatar URL
    gravatar_email = basic.get('gravatart_email', "")
    gravatar_url = get_gravatar_url(hash_gravatar_email(gravatar_email))

    context = {
        "enumeration": provider,
        "random_bg_image": random_background,
        "gravatar_url": gravatar_url,
        "PROVIDER_STATIC_HOST": settings.PROVIDER_STATIC_HOST,
    }
    return render(request, 'providerregistry/details.html', context)

def clean_number(self):
    number = self.cleaned_data["number"]

    # check if the number meets our criteria
    if len(number) != 10:
        raise forms.ValidationError("This number must be 10 digits long.")

    try:
        number = int(number)
    except ValueError:
        raise forms.ValidationError("You must supply a number containing exactly 10 digits.")

    if not vnpi.verify_npi(number):
        raise forms.ValidationError("This enumeration does not appear to be a valid NPI number.")

    r = get_resource(str(number))
    if not r:
        raise forms.ValidationError("This enumeration number is not in the public registry.")

    return number

def test_default_user():
    """Ensures the default user of a task is started as root. This is the default user."""

    # launch unique-sleep
    application_json = get_resource("{}/unique-sleep.json".format(fixture_dir()))
    client = marathon.create_client()
    client.add_app(application_json)
    shakedown.deployment_wait()

    app = client.get_app(application_json['id'])
    user = app.get('user')
    assert user is None

    # wait for deployment to finish
    tasks = client.get_tasks("unique-sleep")
    host = tasks[0]['host']

    assert shakedown.run_command_on_agent(host, "ps aux | grep '[s]leep ' | awk '{if ($1 !=\"root\") exit 1;}'")

    client = marathon.create_client()
    client.remove_app("/unique-sleep")

def provider_profile(request, number):
    # if check_if_resource_exists(number) != 200:
    #     return render(request, 'providerregistry/404.html', {})
    random_background = "%s.jpg" % (random.randrange(1, 27))
    provider = get_resource(number)
    basic = provider.get("basic", {})
    pecos = get_pecos_base(number)

    # Get Gravatar URL
    gravatar_email = basic.get('gravatart_email', "")
    gravatar_url = get_gravatar_url(hash_gravatar_email(gravatar_email))

    # Get Google Maps query
    addresses = provider.get("addresses", [])
    googlemap_q = ""
    location = {}
    for a in addresses:
        if a.get("address_purpose", "") == "LOCATION":
            location = a

    googlemap_q = googlemap_address_query(
        address_1=location.get("address_1", ""),
        address_2=location.get("address_2", ""),
        city=location.get("city", ""),
        state=location.get("state", ""),
        zipcode=location.get("zip", ""),
    )

    context = {
        "enumeration": provider,
        "pecos": pecos,
        "random_bg_image": random_background,
        "gravatar_url": gravatar_url,
        "googlemap_q": googlemap_q,
        "PROVIDER_STATIC_HOST": settings.PROVIDER_STATIC_HOST
    }
    return render(request, 'providerregistry/stylish-portfolio.html', context)

def add_rating(vbox, desc, n):
    """Add rating"""
    if n is None:
        return
    hbox = Gtk.HBox(homogeneous=False, spacing=0)
    hbox.show()
    vbox.pack_start(hbox, False, False, 0)
    label = Gtk.Label("%s: " % desc)
    label.set_use_markup(True)
    label.set_alignment(0, 0.5)
    label.show()
    hbox.pack_start(label, False, False, 0)
    fn = get_resource('data', 'rating.png')
    for _ in xrange(int(n)):
        i = Gtk.Image()
        i.show()
        i.set_alignment(0, 0.5)
        i.set_from_file(fn)
        hbox.pack_start(i, expand=False, fill=False, padding=0)

def load_app(app_name):
    app_path = os.path.join(apps_dir(), "{}.json".format(app_name))
    app = get_resource(app_path)
    app['id'] = make_id(app_name)
    return app

async def test_show_messages_actionlog(server, environment, client, cli, agent, clienthelper):
    """
    Test the `inmanta-cli action-log show-messages` command.
    """
    result = await client.reserve_version(tid=environment)
    assert result.code == 200
    version = result.result["data"]

    resource1 = get_resource(version, key="test1")
    await clienthelper.put_version_simple(resources=[resource1], version=version)

    result = await agent._client.resource_action_update(
        tid=environment,
        resource_ids=[resource1["id"]],
        action_id=uuid.uuid4(),
        action=ResourceAction.deploy,
        started=datetime.datetime.now(),
        finished=datetime.datetime.now(),
        status=ResourceState.deployed,
        messages=[
            {
                "level": "DEBUG",
                "msg": "Started deployment",
                "timestamp": datetime.datetime.now().isoformat(timespec="microseconds"),
                "args": [],
            },
            {
                "level": "INFO",
                "msg": "Deployed successfully",
                "timestamp": datetime.datetime.now().isoformat(timespec="microseconds"),
                "args": [],
            },
        ],
        changes={},
        change=Change.nochange,
        send_events=False,
    )
    assert result.code == 200

    # Obtain action_id
    result = await client.get_resource(tid=environment, id=resource1["id"], logs=True, log_action=ResourceAction.deploy)
    assert result.code == 200
    assert len(result.result["logs"]) == 1
    action_id = result.result["logs"][0]["action_id"]

    result = await cli.run(
        "action-log", "show-messages", "-e", str(environment), "--rvid", resource1["id"], "--action-id", str(action_id)
    )
    assert result.exit_code == 0
    assert "DEBUG Started deployment" in result.output
    assert "INFO Deployed successfully" in result.output

async def test_list_actionlog(server, environment, client, cli, agent, clienthelper):
    """
    Test the `inmanta-cli action-log list` command.
    """

    def assert_nr_records_in_output_table(output: str, nr_records: int) -> None:
        lines = [
            line.strip()
            for line in output.split("\n")
            if line.strip() and line.strip().startswith("|")
        ]
        actual_nr_of_records = len(lines) - 1  # Exclude the header
        assert nr_records == actual_nr_of_records

    result = await client.reserve_version(tid=environment)
    assert result.code == 200
    version = result.result["data"]

    resource1 = get_resource(version, key="test1")
    resource2 = get_resource(version, key="test2")
    await clienthelper.put_version_simple(resources=[resource1, resource2], version=version)

    result = await agent._client.resource_action_update(
        tid=environment,
        resource_ids=[resource1["id"]],
        action_id=uuid.uuid4(),
        action=ResourceAction.deploy,
        started=datetime.datetime.now(),
        finished=datetime.datetime.now(),
        status=ResourceState.failed,
        messages=[
            {
                "level": "INFO",
                "msg": "Deploying",
                "timestamp": datetime.datetime.now().isoformat(timespec="microseconds"),
                "args": [],
            },
            {
                "level": "ERROR",
                "msg": "Deployment failed",
                "timestamp": datetime.datetime.now().isoformat(timespec="microseconds"),
                "args": [],
                "status": ResourceState.failed.value,
            },
        ],
        changes={},
        change=Change.nochange,
        send_events=False,
    )
    assert result.code == 200

    result = await agent._client.resource_action_update(
        tid=environment,
        resource_ids=[resource2["id"]],
        action_id=uuid.uuid4(),
        action=ResourceAction.deploy,
        started=datetime.datetime.now(),
        finished=datetime.datetime.now(),
        status=ResourceState.deployed,
        messages=[
            {
                "level": "INFO",
                "msg": "Deployed successfully",
                "timestamp": datetime.datetime.now().isoformat(timespec="microseconds"),
                "args": [],
            },
        ],
        changes={},
        change=Change.nochange,
        send_events=False,
    )
    assert result.code == 200

    # Get all resource actions for resource1
    result = await cli.run("action-log", "list", "-e", str(environment), "--rvid", resource1["id"])
    assert result.exit_code == 0
    assert_nr_records_in_output_table(result.output, nr_records=2)  # 1 store action + 1 deploy action

    # Get deploy resource actions for resource1
    result = await cli.run("action-log", "list", "-e", str(environment), "--rvid", resource1["id"], "--action", "deploy")
    assert result.exit_code == 0
    assert_nr_records_in_output_table(result.output, nr_records=1)  # 1 deploy action

    # Resource id is provided instead of resource version id
    resource_id = resource1["id"].rsplit(",", maxsplit=1)[0]
    result = await cli.run("action-log", "list", "-e", str(environment), "--rvid", resource_id)
    assert result.exit_code != 0
    assert f"Invalid value for '--rvid': {resource_id}" in result.stderr

    # Incorrect format resource version id
    result = await cli.run("action-log", "list", "-e", str(environment), "--rvid", "test")
    assert result.exit_code != 0
    assert "Invalid value for '--rvid': test" in result.stderr

import sys
import Queue
import random
import threading

from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
from bottle import *

from utils import get_resource, check_connection
import detector

# Add the gui folder to the template path of bottle.py.
TEMPLATE_PATH.insert(0, get_resource('gui'))

# Instantiate the Bottle application.
webapp = Bottle()

# This queue will contain the list of matches retrieved from the yara scan.
queue_results = Queue.Queue()
# This queue will contain the list of errors generated by the detector.
queue_errors = Queue.Queue()

# Instantiate the thread that will run the detector.
scanner = threading.Thread(target=detector.main, args=(queue_results, queue_errors))
scanner.daemon = True

# Enabled language.
lang = 'en'

def load_pod(pod_name):
    pod_path = os.path.join(pods_dir(), "{}.json".format(pod_name))
    pod = get_resource(pod_path)
    pod['id'] = make_id(pod_name)
    return pod

def _pods_json(file="simple-pods.json"):
    return get_resource(os.path.join(fixture_dir(), file))

def static(path):
    return static_file(path, get_resource('gui/static/'))

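# The static() handler above only does useful work once it is attached to a Bottle
# route. A hedged sketch of how it could be wired to the `webapp` application created
# in the GUI module earlier in this collection; the URL prefix '/static/<path:path>'
# and the wrapper name are assumptions, not taken from the original project.
@webapp.route('/static/<path:path>')
def serve_static_example(path):
    return static_file(path, get_resource('gui/static/'))
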
def GrayscaleTransformations(event):
    """ Changing the response body by converting the images to grayscale and
        changing the css colors definitions to colors from gray shades palette
    """
    request = event.request
    response = request.response
    context = request.get('PUBLISHED')
    resp_body = response.getBody()
    resp_body = getattr(context, 'GET', lambda: '')()

    if hasattr(context, 'im_self'):
        context = context.im_self
    if isinstance(context, ImageScale):
        context = context.data
    if hasattr(context, 'context'):
        context = context.context

    content_type = getattr(context, 'content_type', '')
    if callable(content_type):
        content_type = content_type() or ''
    if not content_type:
        content_type = response.headers.get('content-type') or ''

    if isinstance(context, FilesystemFile):
        resp_body = context().read()
        content_type = context.getContentType().split(';')[0]

    filename = getattr(context, 'getId', lambda: False)()
    filename = getattr(context, 'filename', filename)
    if not filename:
        try:
            filename = context.__name__
        except AttributeError:
            return

    images_content_types = ['image/png', 'image/jpg', 'image/jpeg', 'image/gif']
    browser_resource_image = False
    if isinstance(context, browserresourcefile) and \
            context.content_type.split(';')[0] in images_content_types:
        browser_resource_image = True

    if content_type:
        content_type = content_type.split(';')[0].strip()
    if 'javascript' in content_type:
        return

    if isinstance(context, FSImage) or \
            IOFSImage.providedBy(context) or \
            IATImage.providedBy(context) or \
            content_type in images_content_types or \
            browser_resource_image:
        try:
            path = '/'.join(context.getPhysicalPath())
        except AttributeError:
            path = filename
        try:
            resp_body = utils.get_resource(request, response, path)
        except NotFound:
            image_body = resp_body
            if not image_body:
                if hasattr(context, 'data'):
                    image_body = context.data
                elif isinstance(context, FSImage):
                    image_body = context._readFile(True)
            if image_body:
                resp_body = utils.image_to_grayscale(image_body, path)
            else:
                log.debug('Image doesn\'t contain any data: %s' % (path))
            if queryUtility(IResourceDirectory, name=u''):
                utils.store_resource(path, resp_body)
    elif IBrowserView.providedBy(request.get('PUBLISHED')) or \
            content_type in ['text/html', 'text/css'] or \
            isinstance(context, (File, FSFile, ATFile, FSPageTemplate)) and \
            context.content_type.split(';')[0] in ['text/html', 'text/css']:
        if hasattr(aq_base(context), 'data'):
            resp_body = context.data
            if hasattr(resp_body, 'data'):
                resp_body = resp_body.data
        if isinstance(context, FSFile):
            resp_body = context._readFile(0)

        if content_type == 'text/css' or \
                isinstance(context, (File, FSFile, ATFile, FSPageTemplate)) and \
                context.content_type.split(';')[0] == 'text/css':
            try:
                resp_body = utils.get_resource(request, response, filename)
            except (NotFound, AttributeError):
                resp_body = utils.transform_style_properties(resp_body)
                resp_body = utils.transform_css_url(resp_body)
                if queryUtility(IResourceDirectory, name=u''):
                    utils.store_resource(filename, resp_body)
        else:
            if not resp_body:
                rendered_body = utils.render_object_html(context, request)
                if rendered_body:
                    resp_body = rendered_body
            resp_body = utils.add_bodyclass(resp_body)
            resp_body = utils.transform_style_properties(resp_body)
            resp_body = utils.transform_css_url(resp_body)
            resp_body = utils.transform_img_src(resp_body)

    response.setBody(resp_body)

def pod(id=1, instance=1, type="4"):
    data = get_resource("pod-{}-containers.json".format(type))
    data['id'] = "/" + str(id)
    data['scaling']['instances'] = instance
    return data

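# A small usage sketch for pod() above: build a two-instance pod definition and launch
# it through the Marathon client, mirroring the add_pod() call used in
# prefetch_docker_images_on_all_nodes() earlier in this collection. The pod id
# "example-pod", this wrapper function, and the deployment-wait step are illustrative
# assumptions rather than part of the original test suite.
def launch_example_pod():
    pod_def = pod(id="example-pod", instance=2)
    client = marathon.create_client()
    client.add_pod(pod_def)
    deployment_wait(service_id=pod_def["id"])
    return pod_def["id"]
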