def __init__(self):
    """Compile the static rule set used to classify incoming queries.

    Three groups of rules are prepared, each with its own attribute-resolving
    context: presence of the scalar ``main_genre``/``platform`` fields
    (rules 1-4), emptiness of the ``main_genres_length``/``platforms_length``
    counters (rules 5-8), and result-count thresholds (rules 9-11).
    """
    # Group 1: which of the two scalar string fields are populated.
    scalar_context = rule_engine.Context(resolver=rule_engine.resolve_attribute)
    self.rule_1 = rule_engine.Rule('main_genre == "" and platform == "" ', context=scalar_context)
    self.rule_2 = rule_engine.Rule('main_genre != "" and platform == "" ', context=scalar_context)
    self.rule_3 = rule_engine.Rule('main_genre == "" and platform != "" ', context=scalar_context)
    self.rule_4 = rule_engine.Rule('main_genre != "" and platform != "" ', context=scalar_context)

    # Group 2: which of the two length counters are non-zero.
    length_context = rule_engine.Context(resolver=rule_engine.resolve_attribute)
    self.rule_5 = rule_engine.Rule('main_genres_length == 0 and platforms_length != 0 ', context=length_context)
    self.rule_6 = rule_engine.Rule('main_genres_length != 0 and platforms_length == 0 ', context=length_context)
    self.rule_7 = rule_engine.Rule('main_genres_length == 0 and platforms_length == 0 ', context=length_context)
    self.rule_8 = rule_engine.Rule('main_genres_length != 0 and platforms_length != 0 ', context=length_context)

    # Group 3: thresholds on the requested and available result counts.
    count_context = rule_engine.Context(resolver=rule_engine.resolve_attribute)
    self.rule_9 = rule_engine.Rule('recommendation_num > 20', context=count_context)
    self.rule_10 = rule_engine.Rule('results_length > 60', context=count_context)
    self.rule_11 = rule_engine.Rule('recommendation_num > results_length', context=count_context)
def initialize(self):
    """Set up the rule evaluation context and subscribe to server signals.

    :return: ``True`` to indicate that initialization succeeded.
    :rtype: bool
    """
    self._pending = set()
    # Build a type resolver from the declared rule types so rule symbols are
    # validated against the expected data types at parse time.
    type_resolver = rule_engine.type_resolver_from_dict(self.rule_types)
    self._context = rule_engine.Context(
        resolver=_context_resolver,
        type_resolver=type_resolver
    )
    signals.server_initialized.connect(self.on_server_initialized)
    signals.rpc_user_logged_out.connect(self.on_rpc_user_logged_out)
    return True
def main():
    """Filter a JSON-lines file with a rule, printing each matching result.

    Each line of ``json_file`` is decoded as a standalone JSON document and
    tested against the supplied rule. Matching results are printed as URLs
    and (optionally) pretty-printed with blacklisted keys removed.

    :return: An exit status, always ``0`` (rule syntax errors are reported
        on stdout rather than raised).
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-d', '--depth', default=2, type=int, help='the depth to pretty print')
    parser.add_argument('--gzip', action='store_true', default=False, help='decompress the file')
    parser.add_argument('--regex-case-sensitive', default=False, action='store_true', help='use case-sensitive regular expressions')
    parser.add_argument('json_file', type=argparse.FileType('rb'), help='the JSON file to filter')
    parser.add_argument('rule', help='the rule to apply')
    parser.epilog = EPILOG
    arguments = parser.parse_args()

    re_flags = re.MULTILINE
    # BUG FIX: the original did `re_flags &= re.IGNORECASE` when the
    # case-sensitive flag WAS given; because re.MULTILINE & re.IGNORECASE == 0
    # that cleared every flag. Matching is case-insensitive by default, and
    # IGNORECASE is simply omitted when --regex-case-sensitive is requested.
    if not arguments.regex_case_sensitive:
        re_flags |= re.IGNORECASE
    context = rule_engine.Context(default_value=None, regex_flags=re_flags)
    try:
        rule = rule_engine.Rule(arguments.rule, context=context)
    except rule_engine.RuleSyntaxError as error:
        print(error.message)
        return 0

    file_object = arguments.json_file
    if arguments.gzip:
        file_object = gzip.GzipFile(fileobj=file_object)
    total = 0
    matches = 0
    for line in file_object:
        result = json.loads(line.decode('utf-8'))
        total += 1
        if not rule.matches(result):
            continue
        matches += 1
        print(result_to_url(result))
        if arguments.depth > 0:
            # drop noisy keys before pretty printing the matched document
            for key in BLACKLIST:
                result.pop(key, None)
            pprint.pprint(result, depth=arguments.depth)
    # guard against an empty input file (the original raised ZeroDivisionError)
    percent = ((matches / total) * 100) if total else 0.0
    print("rule matched {:,} of {:,} results ({:.2f}%)".format(matches, total, percent))
    return 0
def main():
    """Scan a JSON-lines result file against the YAML vulnerability rules.

    Every record in ``json_file`` is loaded into memory, then each rule from
    ``RULES_FILE`` is compiled and applied; for rules with matches, the
    vulnerability description, its references and the matching hosts are
    printed.

    :return: An exit status, always ``0``.
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--gzip', action='store_true', default=False, help='decompress the file')
    parser.add_argument('json_file', type=argparse.FileType('rb'), help='the JSON file to filter')
    arguments = parser.parse_args()

    # Rules match case-insensitively across multiple lines; missing symbols
    # resolve to None instead of raising.
    context = rule_engine.Context(
        default_value=None,
        regex_flags=re.IGNORECASE | re.MULTILINE)

    source = arguments.json_file
    if arguments.gzip:
        source = gzip.GzipFile(fileobj=source)
    records = [json.loads(line.decode('utf-8')) for line in source]

    with open(RULES_FILE, 'r') as file_h:
        ruleset = yaml.load(file_h, Loader=yaml.FullLoader)
    for vuln in ruleset['rules']:
        try:
            compiled = rule_engine.Rule(vuln['rule'], context=context)
        except rule_engine.RuleSyntaxError as error:
            print(error.message)
            return 0
        hits = tuple(compiled.filter(records))
        if not hits:
            continue
        print(vuln['description'])
        _print_references(vuln.get('references', {}))
        print('Hosts:')
        for hit in hits:
            print(" * {}".format(results_filter.result_to_url(hit)))
        print()
    return 0
def __init__(self, *args, **kwargs):
    """Build the campaign table tab: tree view, filter model, sorting and
    the context (popup) menu, then wire up the application signals.
    """
    super(CampaignViewGenericTableTab, self).__init__(*args, **kwargs)
    treeview = self.gobjects['treeview_campaign']
    # Manager handles multi-row selection, deletion prompting and refresh.
    self.treeview_manager = managers.TreeViewManager(
        treeview,
        selection_mode=Gtk.SelectionMode.MULTIPLE,
        cb_delete=self._prompt_to_delete_row,
        cb_refresh=self.load_campaign_information
    )
    # column_offset=1 because model column 0 is reserved (used by _tv_filter
    # below); each view column supplies its own cell renderer.
    self.treeview_manager.set_column_titles(
        self.view_column_titles,
        column_offset=1,
        renderers=tuple(column.cell_renderer() for column in self.view_columns)
    )
    for column in self.view_columns:
        # Datetime columns get a fixed width so timestamps don't reflow.
        if isinstance(column, extras.ColumnDefinitionDatetime):
            self.treeview_manager.column_views[column.title].set_fixed_width(150)
    self.popup_menu = self.treeview_manager.get_popup_menu()
    """The :py:class:`Gtk.Menu` object which is displayed when right-clicking in the view area."""
    # NOTE(review): re-fetches the same widget retrieved above; appears
    # redundant but is kept as-is.
    treeview = self.gobjects['treeview_campaign']
    self._rule = None
    # Rule context typed from the view columns so filter rules are validated
    # against each column's Python type.
    self._rule_context = rule_engine.Context(type_resolver=rule_engine.type_resolver_from_dict(
        dict((column.name, rule_engine.DataType.from_type(column.python_type)) for column in self.view_columns)
    ))
    view_column_types = tuple(column.g_type for column in self.view_columns)
    # Model layout: column 0 is a str (see column_offset above), followed by
    # one column per view column.
    self._tv_model = Gtk.ListStore(str, *view_column_types)
    # Stack: ListStore -> filter (rule-driven visibility) -> sort -> view.
    self._tv_model_filter = self._tv_model.filter_new()
    self._tv_model_filter.set_visible_func(self._tv_filter)
    tree_model_sort = Gtk.TreeModelSort(model=self._tv_model_filter)
    for idx, column in enumerate(self.view_columns, 1):
        # Install custom sort functions where a column defines one; idx is
        # offset by 1 to skip the reserved model column.
        if column.sort_function is not None:
            tree_model_sort.set_sort_func(idx, column.sort_function, idx)
    treeview.set_model(tree_model_sort)
    self.application.connect('server-connected', self.signal_kp_server_connected)
    # Add a "Show Filter" toggle to the popup menu, initialized from the
    # revealer's current state.
    filter_revealer = self.gobjects['revealer_filter']
    menu_item = Gtk.CheckMenuItem.new_with_label('Show Filter')
    menu_item.set_active(filter_revealer.get_reveal_child())
    menu_item.connect('toggled', self.signal_toggled_show_filter)
    menu_item.show()
    self.popup_menu.append(menu_item)
def main():
    """Filter a GitHub repository's issues or pull requests with a rule.

    Authenticates to GitHub (token or user credentials), fetches all issues
    or pulls for ``repo_slug`` and prints the number and title of each one
    matching the rule.

    :return: An exit status, always ``0`` (rule syntax errors are reported
        on stdout rather than raised).
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    auth_type_parser_group = parser.add_mutually_exclusive_group()
    auth_type_parser_group.add_argument(
        '--auth-token', dest='auth_token', help='authenticate to github with a token')
    auth_type_parser_group.add_argument(
        '--auth-user', dest='auth_user', help='authenticate to github with credentials')
    parser.add_argument('repo_slug', help='the repository to filter')
    parser.add_argument('type', choices=('issues', 'pulls'), help='thing to filter')
    parser.add_argument('rule', help='the rule to apply')
    parser.epilog = EPILOG
    arguments = parser.parse_args()

    # need to define a custom context to use a custom resolver function
    # (consistency fix: use the top-level rule_engine.resolve_attribute
    # export, as the rest of the code base does, instead of reaching into
    # the rule_engine.engine submodule)
    context = rule_engine.Context(resolver=rule_engine.resolve_attribute)
    try:
        rule = rule_engine.Rule(arguments.rule, context=context)
    except rule_engine.RuleSyntaxError as error:
        print(error.message)
        return 0

    gh = _get_github(arguments)
    repo = gh.get_repo(arguments.repo_slug)
    # state='all' so both open and closed issues/pulls are considered
    things = tuple(getattr(repo, 'get_' + arguments.type)(state='all'))
    for thing in rule.filter(things):
        print("{0}#{1: <4} - {2}".format(arguments.repo_slug, thing.number, thing.title))
    return 0
def main():
    """Filter a CSV file with a rule, writing matching rows to stdout.

    The output preserves the input's dialect and field names, so it is a
    valid CSV subset of the input.

    :return: An exit status, always ``0`` (rule syntax errors are reported
        on stdout rather than raised).
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument('csv_file', type=argparse.FileType('r'), help='the CSV file to filter')
    parser.add_argument('rule', help='the rule to apply')
    arguments = parser.parse_args()

    # need to define a custom context to use a custom resolver function
    context = rule_engine.Context(resolver=resolve_item)
    try:
        rule = rule_engine.Rule(arguments.rule, context=context)
    except rule_engine.RuleSyntaxError as error:
        print(error.message)
        return 0

    csv_reader = csv.DictReader(arguments.csv_file)
    csv_writer = csv.DictWriter(sys.stdout, csv_reader.fieldnames, dialect=csv_reader.dialect)
    # BUG FIX: the original never emitted the header row, so the filtered
    # output lost its column names and could not be re-read as the same CSV.
    csv_writer.writeheader()
    for row in rule.filter(csv_reader):
        csv_writer.writerow(row)
    return 0
# NOTE(review): this assignment appears to be the tail of a loop over
# exploit modules that begins above this chunk -- confirm the enclosing
# loop and its indentation against the full file.
# Normalize the comma-separated 'platform' string into a deduplicated,
# whitespace-stripped tuple of platform names.
module['platforms'] = tuple(
    platform.strip() for platform in set(module['platform'].split(',')))

# Load the flag image into memory, then delete it from disk outside of
# debug mode (presumably so it cannot be fetched directly -- CTF-style;
# verify against deployment docs).
with (app_path / 'flag.png').open('rb') as file_h:
    FLAG = file_h.read()
if not config.DEBUG:
    (app_path / 'flag.png').unlink()

# Static type map for the rule engine: declares the type of every symbol a
# user-supplied rule may reference, so rules are type-checked at parse time.
rule_context = rule_engine.Context(
    type_resolver={
        'name': rule_engine.DataType.STRING,
        'fullname': rule_engine.DataType.STRING,
        'aliases': rule_engine.DataType.ARRAY(rule_engine.DataType.STRING),
        'rank': rule_engine.DataType.FLOAT,
        'type': rule_engine.DataType.STRING,
        'author': rule_engine.DataType.ARRAY(rule_engine.DataType.STRING),
        'description': rule_engine.DataType.STRING,
        # numeric fields are declared FLOAT -- rule-engine treats all
        # numbers as floats
        'rport': rule_engine.DataType.FLOAT,
        'autofilter_ports': rule_engine.DataType.ARRAY(
            rule_engine.DataType.FLOAT),
        'check': rule_engine.DataType.BOOLEAN,
        'platforms': rule_engine.DataType.ARRAY(rule_engine.DataType.STRING),
        'ref_name': rule_engine.DataType.STRING
    })

#----------------------------------------------------------------------------#
# App Config.
#----------------------------------------------------------------------------#
app = Flask(__name__)
app.config.from_object('config')
#db = SQLAlchemy(app)
# NOTE(review): the three properties below appear to be the tail of a class
# whose header lies before this chunk -- confirm the enclosing class and
# restore method-level indentation against the full file.

@property
def test(self):
    """Return the rule's test expression."""
    return self._test

@property
def action(self):
    """Return the action associated with the rule."""
    return self._action

@property
def type(self):
    """Return the rule's type."""
    return self._type

# This context is used when we want to analyze python objects
resolver_context = rule_engine.Context(resolver=rule_engine.resolve_attribute)

# Maps an AMQP-style routing key to the class used to wrap that event's
# payload. (presumably motion_event / weather_forecast are defined elsewhere
# in this file -- verify)
event_to_class_map = {
    "incoming.motion": motion_event,
    "incoming.weather": weather_forecast,
}


class event_details:
    # Wraps a raw message: the routing key identifies the event type and the
    # body carries the JSON-encoded payload.
    def __init__(self, routingKey, body):
        self._routingKey = routingKey
        self._body = body
        # set to True once a rule matches this event
        self.matched = False
        # convert body from bytecode to dictionary
        # NOTE(review): the constructor appears to continue past the end of
        # this chunk -- body_dict is presumably used below; confirm.
        body_dict = json.loads(body.decode())
def __init__(self, data):
    """Hold *data* and prepare a rule context for evaluating rules over it.

    The context resolves unknown symbols to ``None`` rather than raising, so
    rules may reference fields that are absent from the data.
    """
    self._rule_context = rule_engine.Context(default_value=None)
    self.data = data