def generate_rules_mk(cli):
    """Generate a rules.mk file from info.json."""
    # Resolve the info tree from either a specific keymap or the keyboard itself.
    if cli.args.keymap:
        keymap_path = locate_keymap(cli.args.keyboard, cli.args.keymap)
        keymap_json = json_load(keymap_path)
        validate(keymap_json, 'qmk.keymap.v1')
        kb_info_json = dotty(keymap_json.get('config', {}))
    else:
        kb_info_json = dotty(info_json(cli.args.keyboard))

    mapping = json_load(Path('data/mappings/info_rules.json'))
    lines = [GPL2_HEADER_SH_LIKE, GENERATED_HEADER_SH_LIKE]

    # Basic rules derived from the info_rules mapping.
    for rules_key, info_dict in mapping.items():
        entry = process_mapping_rule(kb_info_json, rules_key, info_dict)
        if entry:
            lines.append(entry)

    # Feature enable/disable flags.
    if 'features' in kb_info_json:
        for feature, enabled in kb_info_json['features'].items():
            lines.append(f'{feature.upper()}_ENABLE ?= {"yes" if enabled else "no"}')

    # SPLIT_TRANSPORT, when a custom protocol is declared.
    if kb_info_json.get('split', {}).get('transport', {}).get('protocol') == 'custom':
        lines.append('SPLIT_TRANSPORT ?= custom')

    # CUSTOM_MATRIX, when custom matrix pins are declared.
    matrix_pins = kb_info_json.get('matrix_pins', {})
    if matrix_pins.get('custom'):
        if matrix_pins.get('custom_lite'):
            lines.append('CUSTOM_MATRIX ?= lite')
        else:
            lines.append('CUSTOM_MATRIX ?= yes')

    # Write out the file and report where it went.
    dump_lines(cli.args.output, lines)

    if cli.args.output:
        if not cli.args.quiet:
            cli.log.info('Wrote rules.mk to %s.', cli.args.output)
        elif cli.args.escape:
            print(cli.args.output.as_posix().replace(' ', '\\ '))
        else:
            print(cli.args.output)
def api_request():
    """Example: build a request with dotty and inspect a fake response."""
    from dotty_dict import dotty

    def make_request(payload):
        """Fake request for example purpose.

        :param dict payload: Example payload
        :return dict: Example response
        """
        return {
            'status': {'code': 200, 'msg': 'User created'},
            'data': {
                'user': {
                    'id': 123,
                    'personal': {'name': 'Arnold', 'email': '*****@*****.**'},
                    'privileges': {
                        'granted': ['login', 'guest', 'superuser'],
                        'denied': ['admin'],
                        'history': {
                            'actions': [
                                ['superuser granted', '2018-04-29T17:08:48'],
                                ['login granted', '2018-04-29T17:08:48'],
                                ['guest granted', '2018-04-29T17:08:48'],
                                ['created', '2018-04-29T17:08:48'],
                                ['signup_submit', '2018-04-29T17:08:47'],
                            ],
                        },
                    },
                },
            },
        }

    # Dotted keys create the nested request structure on assignment.
    request = dotty()
    request['request.data.payload'] = {'name': 'Arnold', 'email': '*****@*****.**', 'type': 'superuser'}
    request['request.data.headers'] = {'content_type': 'application/json'}
    request['request.url'] = 'http://127.0.0.1/api/user/create'

    response = dotty(make_request(request.to_dict()))

    assert response['status.code'] == 200
    assert 'superuser' in response['data.user.privileges.granted']
def generate_valid_data_for_create(self, parent='', valid_data=None, data=None):
    """Recursively build a flat dotted-key dict of valid creation values.

    :param str parent: Dotted-key prefix accumulated while descending.
    :param valid_data: Dotty accumulator shared across recursive calls.
    :param dict data: Requirements to walk; defaults to self.properties.
    :return dict: Plain dict produced from the dotty accumulator.
    """
    valid_data = valid_data or dotty()
    data = data or self.properties
    parent = parent or ''
    for property_name, requirements in data.items():
        if isinstance(requirements, dict):
            # Top-level properties restart the prefix; nested ones extend it.
            if property_name in self.properties:
                parent = ''
            parent += property_name + '.'
            self.generate_valid_data_for_create(valid_data=valid_data, data=requirements, parent=parent)
        else:
            # Direct attribute call: the original getattr(...)() plus a
            # try/except that only re-raised the same AttributeError added
            # nothing — the exception propagates identically without them.
            # NOTE(review): `parent` keeps the last nested prefix when control
            # returns to a sibling leaf at the same level — confirm intended.
            valid_data[parent + property_name] = requirements.generate_valid_data()
    return valid_data.to_dict()
def setUp(self):
    """Build the dotty fixture shared by the tests in this case."""
    fixture = {
        'field1': 'Value of F1',
        'field2': 'Value of F2',
        'field3': [
            {'subfield1': 'Value of subfield1 (item 0)', 'subfield2': 'Value of subfield2 (item 0)'},
            {'subfield1': 'Value of subfield1 (item 1)', 'subfield2': 'Value of subfield2 (item 1)'},
        ],
        'field4': 'Not wanted',
        'field5': [{'subfield1': [{'subsubfield': 'Value of sub subfield (item 0)'}]}],
        'field6': ['a', 'b'],
    }
    self.dot = dotty(fixture)
def setUp(self):
    """Build a dotty fixture with lists of dicts and nested lists."""
    fixture = {
        'field1': [
            {"subfield1": "value01", "subfield2": "value02"},
            {"subfield1": "value11", "subfield2": "value12"},
            {"subfield1": "value21", "subfield2": "value22"},
            {"subfield1": "value31", "subfield2": "value32"},
        ],
        "field2": [
            {
                "subfield1": [
                    {"nestedsubfield1": "nestedvalue001", "nestedsubfield2": "nestedvalue002"},
                    {"nestedsubfield1": "nestedvalue011", "nestedsubfield2": "nestedvalue012"},
                ],
            },
            {
                "subfield1": [
                    {"nestedsubfield1": "nestedvalue101", "nestedsubfield2": "nestedvalue102"},
                    {"nestedsubfield1": "nestedvalue111", "nestedsubfield2": "nestedvalue112"},
                ],
            },
        ],
    }
    self.dot = dotty(fixture)
def list_embedded():
    """Example: dotty supports embedded lists natively."""
    from dotty_dict import dotty

    # WARNING!
    # Lists used to require dotty_l; that feature is deprecated and was
    # removed — lists now have native support. Pass the additional flag
    # 'no_list' to dotty if you need the old behaviour.
    dot = dotty({
        'annotations': [
            {'label': 'app', 'value': 'webapi'},
            {'label': 'role', 'value': 'admin'},
        ],
        'spec': {
            'containers': [
                ['gpu', 'tensorflow', 'ML'],
                ['cpu', 'webserver', 'sql'],
            ]
        }
    })

    expected = {
        'annotations.0.label': 'app',
        'annotations.0.value': 'webapi',
        'annotations.1.label': 'role',
        'annotations.1.value': 'admin',
        'spec.containers.0.0': 'gpu',
        'spec.containers.0.1': 'tensorflow',
        'spec.containers.0.2': 'ML',
        'spec.containers.1.0': 'cpu',
        'spec.containers.1.1': 'webserver',
        'spec.containers.1.2': 'sql',
    }
    for key, value in expected.items():
        assert dot[key] == value
def map_sysmon_fields(evt):
    """Translate raw sysmon event_data fields into dotted ECS-style fields.

    Moves mapped event_data entries out of evt, adds per-event-id
    action/type/category, and merges everything back into the event.
    """
    extra = dotty()

    # Relocate every mapped event_data field (snapshot keys: we mutate evt).
    if 'event_data' in evt:
        for key in list(evt['event_data'].keys()):
            if key in sysmon_fields_mapping:
                extra[sysmon_fields_mapping[key]] = evt['event_data'][key]
                del evt['event_data'][key]

    # Event fields keyed by event id.
    eid = evt['event']['id']
    if eid in sysmon_event_action:
        extra['event.action'] = sysmon_event_action[eid]
    if eid in sysmon_event_type:
        extra['event.type'] = sysmon_event_type[eid]
    if eid in sysmon_event_category:
        extra['event.category'] = sysmon_event_category[eid]

    extra['event.kind'] = 'event'
    extra['event.module'] = 'sysmon'

    # Merge the 'event' sub-dict explicitly, then the remainder at top level.
    extra = dict(extra)
    evt['event'] = {**evt['event'], **extra['event']}
    del extra['event']
    return {**evt, **extra}
def add_process_args(evt):
    """Split process.command_line into process.args and count them.

    :param dict evt: Event to enrich.
    :return dict: Event with process.args and process.args_count populated.
    """
    evt = split_command_line(evt, "process.command_line", "process.args")
    d = dotty(evt)
    # Truthiness already excludes empty lists — the original extra
    # `len(...) > 0` check was redundant.
    if "process.args" in d and d["process.args"]:
        d["process.args_count"] = len(d["process.args"])
    return dict(d)
def generate_dfu_header(cli):
    """Generates the Keyboard.h file."""
    keyboard = cli.config.generate_dfu_header.keyboard

    # Validate the requested keyboard before doing any work.
    if not keyboard:
        cli.log.error('Missing parameter: --keyboard')
        cli.subcommands['info'].print_help()
        return False
    if not is_keyboard(keyboard):
        cli.log.error('Invalid keyboard: "%s"', keyboard)
        return False

    # Build the Keyboard.h contents.
    kb_info_json = dotty(info_json(keyboard))
    lines = [GPL2_HEADER_C_LIKE, GENERATED_HEADER_C_LIKE, '#pragma once']
    lines.append(f'#define MANUFACTURER {kb_info_json["manufacturer"]}')
    lines.append(f'#define PRODUCT {kb_info_json["keyboard_name"]} Bootloader')

    # Optional bootloader settings, emitted only when present in info.json.
    optional_defines = (
        ('qmk_lufa_bootloader.esc_output', 'QMK_ESC_OUTPUT'),
        ('qmk_lufa_bootloader.esc_input', 'QMK_ESC_INPUT'),
        ('qmk_lufa_bootloader.led', 'QMK_LED'),
        ('qmk_lufa_bootloader.speaker', 'QMK_SPEAKER'),
    )
    for info_key, define in optional_defines:
        if info_key in kb_info_json:
            lines.append(f'#define {define} {kb_info_json[info_key]}')

    # Write out the results.
    dump_lines(cli.args.output, lines, cli.args.quiet)
def test_dotty_and_not_mapping_instance_should_not_be_equal(self):
    """A dotty never compares equal to non-mapping values."""
    dot = dotty({'a': 1, 'b': 2})
    non_mappings = (
        [('a', 1), ('b', 2)],
        ('a', 1),
        {1, 2, 3},
        123,
        'a:1, b:2',
    )
    for other in non_mappings:
        self.assertNotEqual(dot, other)
def set_additional_file_fields_from_path(evt):
    """Derive file.name and file.directory from file.path (backslash paths)."""
    doc = dotty(evt)
    if "file.path" in doc and doc["file.path"]:
        parts = doc["file.path"].split("\\")
        doc["file.name"] = parts[-1]
        doc["file.directory"] = '\\'.join(parts[:-1])
    return {**evt, **dict(doc)}
def expand_from_json_file(infile, output_directory=None):
    """
    Given json input file, read code_src and write code_snap.

    Parameters
    ----------
    infile : str
        filename of json dictionary include psnap metadata and state data.
    output_directory : str, optional
        Output directory for code_snap else default is to use same
        directory as code_src.

    Returns
    -------
    dict
        Dictionary containing "code_src" and "code_snap".
    """
    # Bug fix: the original read sys.argv[1], silently ignoring the
    # `infile` argument documented and passed by callers.
    with open(infile, encoding="utf-8") as f:
        data = json.load(f)
    # When dotty_dict is available, wrap the state data for dotted access.
    if dotty is not None:
        from psnap import state_tracker
        meta_key = state_tracker.StateTracker._meta_key
        data_key = data[meta_key]["data_key"]
        data[data_key] = dotty(data[data_key])
    return KeywordExpander.expand_from_json(
        data, output_directory=output_directory)
def __init__(self, *args, **kwargs):
    """Constructor for GParser: initializes empty parse state."""
    super().__init__(*args, **kwargs)
    # Nothing resolved or loaded yet.
    self.params = None
    self._loaded_paths = []
    self._path = ""
    self._content = dotty(OrderedDict())
def _extract_config_h(info_data):
    """Pull some keyboard information from existing config.h files
    """
    config_c = config_h(info_data['keyboard_folder'])

    # Pull in data from the json map
    dotty_info = dotty(info_data)
    info_config_map = json_load(Path('data/mappings/info_config.json'))

    for config_key, info_dict in info_config_map.items():
        # Each mapping entry names the info.json key and how to coerce the
        # raw config.h string value ('str' when unspecified).
        info_key = info_dict['info_key']
        key_type = info_dict.get('value_type', 'str')

        try:
            if config_key in config_c and info_dict.get('to_json', True):
                # Warn when config.h would clobber a value already in info.json,
                # unless the mapping opts out via warn_duplicate.
                if dotty_info.get(info_key) and info_dict.get('warn_duplicate', True):
                    _log_warning(info_data, '%s in config.h is overwriting %s in info.json' % (config_key, info_key))

                if key_type.startswith('array'):
                    # 'array.int' style: the suffix names the element type.
                    if '.' in key_type:
                        key_type, array_type = key_type.split('.', 1)
                    else:
                        array_type = None

                    # Strip C initializer braces before splitting on commas.
                    config_value = config_c[config_key].replace('{', '').replace('}', '').strip()

                    if array_type == 'int':
                        dotty_info[info_key] = list(map(int, config_value.split(',')))
                    else:
                        dotty_info[info_key] = config_value.split(',')

                elif key_type == 'bool':
                    dotty_info[info_key] = config_c[config_key] in true_values
                elif key_type == 'hex':
                    # Normalize to an uppercase 0x-prefixed string.
                    dotty_info[info_key] = '0x' + config_c[config_key][2:].upper()
                elif key_type == 'list':
                    # Whitespace-separated values.
                    dotty_info[info_key] = config_c[config_key].split()
                elif key_type == 'int':
                    dotty_info[info_key] = int(config_c[config_key])
                else:
                    dotty_info[info_key] = config_c[config_key]
        except Exception as e:
            # Coercion failures are reported per-key and do not abort the scan.
            _log_warning(info_data, f'{config_key}->{info_key}: {e}')

    info_data.update(dotty_info)

    # Pull data that easily can't be mapped in json
    _extract_matrix_info(info_data, config_c)
    _extract_audio(info_data, config_c)
    _extract_split_main(info_data, config_c)
    _extract_split_transport(info_data, config_c)
    _extract_split_right_pins(info_data, config_c)

    return info_data
def add_command_name(evt):
    """Relocate event_data.CommandName to powershell.command.name (when non-empty)."""
    doc = dotty(evt)
    if "event_data.CommandName" not in doc:
        return dict(doc)
    value = doc["event_data.CommandName"]
    if value:
        doc["powershell.command.name"] = value
        del doc["event_data.CommandName"]
    return dict(doc)
def add_engine_version(evt):
    """Relocate event_data.EngineVersion to powershell.engine.version (when non-empty)."""
    doc = dotty(evt)
    if "event_data.EngineVersion" not in doc:
        return dict(doc)
    value = doc["event_data.EngineVersion"]
    if value:
        doc["powershell.engine.version"] = value
        del doc["event_data.EngineVersion"]
    return dict(doc)
def test_copy(self):
    """copy() returns an equal but independent Dotty with its own backing dict."""
    original = dotty({'a': 1, 'b': 2})
    clone = original.copy()
    self.assertIsInstance(clone, Dotty)
    self.assertEqual(original, clone)
    self.assertIsNot(original, clone)
    self.assertIsNot(original._data, clone._data)
def api_client(apiurl: str, config: dict, stored_responses: dict, https_verify: bool):
    """Execute one configured API call, substituting {{name.path}}-style
    variables in the URI and payload from previously stored responses.

    NOTE(review): mutates `stored_responses` (adds 'random'/'ipaddr') and
    the `payload` dict from config in place; exits the process on a
    non-2xx POST — confirm callers expect these side effects.
    """
    uri = config.get('uri', '/')
    method = config.get('method', 'POST')
    payload = config.get('payload', None)
    raw_payload = config.get('raw_payload', None)
    # Built-in variables available for substitution.
    stored_responses['random'] = {'uuid4': uuid4()}
    ipaddr = socket.gethostbyname(socket.gethostname())
    stored_responses['ipaddr'] = {'ip': ipaddr}
    response_codes = [200, 204]
    # group(1) = stored-response name, group(2) = dotted path within it,
    # group(0) = full placeholder text (presumably — depends on RE_VARIABLE).
    variable_match = RE_VARIABLE(uri)
    if variable_match is not None:
        stored_response = dotty(stored_responses[variable_match.group(1)])
        uri = uri.replace(
            variable_match.group(0),
            str(stored_response[variable_match.group(2)])
        )
    if payload:
        # Substitute one placeholder per string payload value.
        for k, v in payload.items():
            if not isinstance(v, str):
                continue
            variable_match = RE_VARIABLE(v)
            if variable_match is None:
                continue
            stored_response = dotty(stored_responses[variable_match.group(1)])
            payload[k] = v.replace(
                variable_match.group(0),
                str(stored_response[variable_match.group(2)])
            )
    elif raw_payload:
        # Raw payloads are sent verbatim, no substitution.
        payload = raw_payload
    if method == 'POST':
        response = requests.post("%s%s" % (apiurl, uri), json=payload, verify=https_verify)
        if response.status_code not in response_codes:
            sys.exit(1)
        try:
            # Only dict JSON bodies are returned; anything else yields None.
            data = json.loads(response.text)
            if isinstance(data, dict):
                return data
            else:
                return None
        except Exception:
            return None
    elif method == 'DELETE':
        response = requests.delete("%s%s" % (apiurl, uri), verify=https_verify)
        return response.status_code == 200
def add_pipeline_id(evt):
    """Relocate event_data.PipelineId to powershell.pipeline_id (when non-empty).

    :param dict evt: Event to enrich.
    :return dict: Event with the field relocated.
    """
    d = dotty(evt)
    if "event_data.PipelineId" in d:
        pipeline_id = d["event_data.PipelineId"]  # renamed: `id` shadowed the builtin
        if pipeline_id:
            d["powershell.pipeline_id"] = pipeline_id
            del d["event_data.PipelineId"]
    return dict(d)
def add_runspace_id(evt):
    """Relocate event_data.RunspaceId to powershell.runspace_id (when non-empty).

    :param dict evt: Event to enrich.
    :return dict: Event with the field relocated.
    """
    d = dotty(evt)
    if "event_data.RunspaceId" in d:
        runspace_id = d["event_data.RunspaceId"]  # renamed: `id` shadowed the builtin
        if runspace_id:
            d["powershell.runspace_id"] = runspace_id
            del d["event_data.RunspaceId"]
    return dict(d)
def set_process_name_from_path(evt, path_field, name_field):
    """Fill name_field from the basename of path_field when not already set.

    :param dict evt: Event to enrich.
    :param str path_field: Dotted key holding the executable path.
    :param str name_field: Dotted key of the process name to derive.
    :return dict: Event merged with the derived name field.
    """
    d = dotty(evt)
    if name_field in d and d[name_field]:
        # Bug fix: was a bare `return` (None); callers always expect the
        # event dict back, so return it unchanged.
        return evt
    if path_field in d:
        exe = d[path_field]
        d[name_field] = exe.split("\\")[-1]
    return {**evt, **dict(d)}
def add_executable_version(evt):
    """Relocate event_data.HostVersion to powershell.process.executable_version (when non-empty)."""
    doc = dotty(evt)
    if "event_data.HostVersion" not in doc:
        return dict(doc)
    value = doc["event_data.HostVersion"]
    if value:
        doc["powershell.process.executable_version"] = value
        del doc["event_data.HostVersion"]
    return dict(doc)
def add_command_value(evt):
    """Relocate event_data.CommandLine to powershell.command.value (when non-empty)."""
    doc = dotty(evt)
    if "event_data.CommandLine" not in doc:
        return dict(doc)
    value = doc["event_data.CommandLine"]
    if value:
        doc["powershell.command.value"] = value
        del doc["event_data.CommandLine"]
    return dict(doc)
def add_command_path(evt):
    """Relocate event_data.CommandPath to powershell.command.path (when non-empty)."""
    doc = dotty(evt)
    if "event_data.CommandPath" not in doc:
        return dict(doc)
    value = doc["event_data.CommandPath"]
    if value:
        doc["powershell.command.path"] = value
        del doc["event_data.CommandPath"]
    return dict(doc)
def test_string_digit_key(self):
    """String digit keys resolve through dotted access on dicts."""
    dot = dotty({'field': {'1': 'one', '5': 'five'}})
    self.assertEqual(dot['field.1'], 'one')
    self.assertEqual(dot['field.5'], 'five')
def add_script_block_id(evt):
    """Relocate event_data.ScriptBlockId to powershell.file.script_block_id (when non-empty).

    :param dict evt: Event to enrich.
    :return dict: Event with the field relocated.
    """
    d = dotty(evt)
    if "event_data.ScriptBlockId" in d:
        block_id = d["event_data.ScriptBlockId"]  # renamed: `id` shadowed the builtin
        if block_id:
            d["powershell.file.script_block_id"] = block_id
            del d["event_data.ScriptBlockId"]
    return dict(d)
def test_list_as_return_value(self):
    """Numeric dotted keys index into embedded lists; membership follows length."""
    dot = dotty({'field': ['list_field0', 'list_field1']})
    self.assertEqual(dot['field.0'], 'list_field0')
    self.assertEqual(dot['field.1'], 'list_field1')
    self.assertIn('field.0', dot)
    self.assertIn('field.1', dot)
    self.assertNotIn('field.2', dot)
def add_script_block_text(evt):
    """Relocate event_data.ScriptBlockText to powershell.file.script_block_text (when non-empty)."""
    doc = dotty(evt)
    if "event_data.ScriptBlockText" not in doc:
        return dict(doc)
    value = doc["event_data.ScriptBlockText"]
    if value:
        doc["powershell.file.script_block_text"] = value
        del doc["event_data.ScriptBlockText"]
    return dict(doc)
def add_command_type(evt):
    """Relocate event_data.CommandType to powershell.command.type (when non-empty)."""
    doc = dotty(evt)
    if "event_data.CommandType" not in doc:
        return dict(doc)
    value = doc["event_data.CommandType"]
    if value:
        doc["powershell.command.type"] = value
        del doc["event_data.CommandType"]
    return dict(doc)
def test_integer_keys(self):
    """Integer dict keys are reachable via their string form in dotted paths."""
    dot = dotty({'field': {1: 'one', 5: 'five'}})
    self.assertEqual(dot['field.1'], 'one')
    self.assertEqual(dot['field.5'], 'five')