def main():
    """Batch-convert STIX 1.x XML idioms to JSON files.

    The JSON output directory may be given as ``sys.argv[1]`` and the XML
    input directory as ``sys.argv[2]``; otherwise both are located via
    ``find_dir`` relative to this file.
    """
    directory = os.path.dirname(__file__)
    if len(sys.argv) > 1:
        json_dir = sys.argv[1]
    else:
        json_dir = find_dir(directory, "idioms-json")
    if len(sys.argv) > 2:
        xml_dir = sys.argv[2]
    else:
        xml_dir = find_dir(directory, "idioms-xml")

    if not os.path.exists(json_dir):
        os.makedirs(json_dir)

    # Deeply nested XML documents can exceed the default recursion limit.
    sys.setrecursionlimit(2000)

    for filename in sorted(os.listdir(xml_dir)):
        # splitext is safe for names with no dot or with extra dots
        # ("a.b.xml"), unlike the previous filename.split(".") approach,
        # which raised IndexError on extension-less names.
        base, ext = os.path.splitext(filename)
        if ext != ".xml":
            continue
        xml_path = os.path.join(xml_dir, filename)
        destination = os.path.abspath(os.path.join(json_dir, base + ".json"))
        initialize_options()
        sys.stdout.write(xml_path + "\n")
        json_output = elevate_file(xml_path)
        with io.open(destination, "w", encoding="utf-8") as f:
            f.write(json_output)
def idiom_mappings(xml_file_path, stored_json):
    """Test fresh conversion from XML to JSON matches stored JSON samples.

    Yields (expected, actual) pairs of (path, value) tuples produced by
    ``iterpath`` over the stored sample and the freshly converted document.
    """
    print("Checking - " + xml_file_path)
    initialize_options()
    # json.loads parses the string directly; the previous code bound a
    # StringIO instance to the name ``io``, shadowing the io module.
    converted_json = json.loads(elevate_file(xml_file_path))
    for good, to_check in zip(iterpath(stored_json), iterpath(converted_json)):
        good_path, good_value = good
        last_good_field = good_path[-1]
        if isinstance(good_value, (dict, list)):
            # No need to verify iterable types. Since we will deal
            # with individual values in the future.
            continue
        if last_good_field in IGNORE:
            # Since fresh conversion may create dynamic values.
            # Some fields are omitted for verification. Currently
            # fields with: identifier and timestamp values.
            continue
        yield good, to_check
def main():
    """Elevate the test fragment and cache the result to testcache2.json."""
    initialize_options()
    results = elevate_file(testfrag)
    # Context manager guarantees the handle is flushed and closed even on
    # error; the original left the file object open.
    with open("testcache2.json", "w") as fi:
        fi.write(results)
def main():
    """Elevate every STIX 1.x XML document in a directory.

    Results are written one ``.json`` file per input under
    ``--output-directory`` when given, otherwise printed to stdout.
    """
    elevator_parser = _get_arg_parser(False)
    elevator_parser.add_argument(
        "dir_",
        # Typo fix: "contaning" -> "containing" in the user-visible help.
        help="A directory containing STIX 1.x documents to be elevated.",
        metavar="dir")
    elevator_parser.add_argument("--output-directory",
                                 help="output logs",
                                 dest="output_directory",
                                 action="store",
                                 default=None)
    elevator_args = elevator_parser.parse_args()
    initialize_options(elevator_args)
    for filename in os.listdir(elevator_args.dir_):
        path = os.path.join(elevator_args.dir_, filename)
        if not path.endswith(".xml"):
            continue
        sys.stdout.write(path + "\n")
        # splitext copes with names containing extra dots, unlike split(".").
        base = os.path.splitext(filename)[0]
        set_option_value("file_", base)
        result = elevate_file(path)
        if elevator_args.output_directory:
            destination = os.path.abspath(
                os.path.join(elevator_args.output_directory, base + ".json"))
            # ``with`` closes the file even if write() raises.
            with open(destination, "w") as output_file:
                output_file.write(result)
        else:
            print(result + "\n")
def main():
    """Convert every STIX 1.x XML idiom in the XML directory to JSON.

    Directory overrides come from ``sys.argv[1]`` (JSON output) and
    ``sys.argv[2]`` (XML input); defaults are located via ``find_dir``.
    """
    directory = os.path.dirname(__file__)
    if len(sys.argv) > 1:
        json_dir = sys.argv[1]
    else:
        json_dir = find_dir(directory, "idioms-json")
    if len(sys.argv) > 2:
        xml_dir = sys.argv[2]
    else:
        xml_dir = find_dir(directory, "idioms-xml")

    if not os.path.exists(json_dir):
        os.makedirs(json_dir)

    for filename in os.listdir(xml_dir):
        # splitext handles extension-less names and extra dots safely;
        # filename.split(".")[1] raised IndexError on names with no dot.
        base, ext = os.path.splitext(filename)
        if ext != ".xml":
            continue
        xml_path = os.path.join(xml_dir, filename)
        destination = os.path.abspath(os.path.join(json_dir, base + ".json"))
        initialize_options()
        sys.stdout.write(xml_path + "\n")
        json_output = elevate_file(xml_path)
        # ``with`` guarantees the output file is closed even on error.
        with open(destination, "w") as output_file:
            output_file.write(json_output)
def main():
    """Parse stix-elevator command-line arguments and print the elevated file."""
    args = _get_arg_parser().parse_args()
    initialize_options(args)
    print(elevate_file(args.file_))
def main():
    """Elevate every .xml file found in the directory named by sys.argv[1]."""
    source_dir = sys.argv[1]
    for entry in os.listdir(source_dir):
        full_path = os.path.join(source_dir, entry)
        # Options are reset for each file so runs stay independent.
        initialize_options()
        set_option_value("incidents", False)
        if not full_path.endswith(".xml"):
            continue
        sys.stdout.write(full_path + "\n")
        print(elevate_file(full_path) + "\n")
def main():
    """Elevate the file named on the command line; exit 1 if elevation fails."""
    args = _get_arg_parser().parse_args()
    initialize_options(args)
    result = elevate_file(args.file_)
    # A falsy result signals failure — report it through the exit status.
    if not result:
        sys.exit(1)
    print(result + "\n")
def main():
    """Elevate the file named on the command line; exit 1 if elevation fails."""
    args = _get_arg_parser().parse_args()
    # Deeply nested STIX documents can exceed the default recursion limit.
    sys.setrecursionlimit(3000)
    initialize_options(args)
    result = elevate_file(args.file_)
    if not result:
        sys.exit(1)
    sys.stdout.write(result + "\n")
def test_deprecated_elevate_file():
    """elevate_file() still works but must emit a DeprecationWarning."""
    setup_options()
    here = os.path.dirname(__file__)
    xml_idioms_dir = find_dir(here, "idioms-xml")
    archive_file = os.path.join(xml_idioms_dir,
                                "141-TLP-marking-structures.xml")
    with pytest.warns(DeprecationWarning):
        json_result = elevate_file(archive_file)
    assert json_result
    print(json_result)
def convert_to_stix2_from_stix_file_path(stix_file_path):
    """Elevate a STIX 1.x file to 2.0, then step the bundle up to 2.1."""
    # STIX 1.x -> 2.0
    initialize_options()
    set_option_value('validator_args', '--silent')
    set_option_value('silent', 'True')
    elevated = elevate_file(stix_file_path)
    bundle_20 = parse(replace_stix2_tlp(elevated))
    # STIX 2.0 -> 2.1
    stepped_json_str = step_bundle(json.loads(str(bundle_20)))
    return parse(stepped_json_str)
def idiom_elevator_mappings(before_file_path, stored_json, version):
    """Test fresh conversion from XML to JSON matches stored JSON samples."""
    print("Checking - " + before_file_path)
    print("With Master - " + stored_json["id"])
    initialize_options()
    set_option_value("log_level", "CRITICAL")
    set_option_value("spec_version", version)
    set_option_value("validator_args", "--no-cache --version " + version)
    # Testing requires the "no_policy" policy; force it if anything else
    # is configured.
    if get_option_value("policy") != "no_policy":
        print("'no_policy' is not allowed for testing")
        set_option_value("policy", "no_policy")
    # Deeply nested documents can exceed the default recursion limit.
    sys.setrecursionlimit(3000)
    fresh_json = json.loads(elevate_file(before_file_path))
    return idiom_mappings(fresh_json, stored_json)
def main():
    """Elevate every STIX 1.x XML document in a directory.

    Results are written one ``.json`` per input under ``--output-directory``
    when given, otherwise to stdout. Exits with status 1 if any file fails
    to elevate.
    """
    elevator_parser = _get_arg_parser(False)
    elevator_parser.add_argument(
        "dir_",
        help="A directory containing STIX 1.x documents to be elevated.",
        metavar="dir")
    elevator_parser.add_argument("--output-directory",
                                 help="output logs",
                                 dest="output_directory",
                                 action="store",
                                 default=None)
    elevator_args = elevator_parser.parse_args()
    initialize_options(elevator_args)
    set_option_value(
        "validator_args",
        get_option_value("validator_args") + " --version " +
        get_option_value("spec_version"))
    all_succeeded = True
    # Deeply nested XML documents can exceed the default recursion limit.
    sys.setrecursionlimit(2000)
    for filename in sorted(os.listdir(elevator_args.dir_)):
        path = os.path.join(elevator_args.dir_, filename)
        if not path.endswith(".xml"):
            continue
        sys.stdout.write(path + "\n")
        # splitext copes with names containing extra dots, unlike split(".").
        base = os.path.splitext(filename)[0]
        set_option_value("file_", base)
        result = elevate_file(path)
        if result:
            if elevator_args.output_directory:
                destination = os.path.abspath(
                    os.path.join(elevator_args.output_directory,
                                 base + ".json"))
                with io.open(destination, "w", encoding="utf-8") as f:
                    f.write(result)
            else:
                sys.stdout.write(result + "\n")
        else:
            all_succeeded = False
    if not all_succeeded:
        sys.exit(1)
def convert_to_stix_1x_to_21(stix_file_path):
    """Elevate a STIX 1.x file to 2.0 (patching known issues), then to 2.1."""
    # STIX 1.x -> 2.0
    initialize_options()
    set_option_value('validator_args', '--silent')
    set_option_value('silent', 'True')
    elevated_str = elevate_file(stix_file_path)
    bundle = _replace_stix2_tlp(elevated_str)
    # for stix2-elevator issue: identity objects may lack the required
    # identity_class property, so rebuild the bundle with it defaulted.
    patched = {
        'id': bundle['id'],
        'spec_version': bundle['spec_version'],
        'type': bundle['type'],
        'objects': [],
    }
    for obj in bundle['objects']:
        if obj['type'] == 'identity' and 'identity_class' not in obj:
            obj['identity_class'] = 'unknown'
        patched['objects'].append(obj)
    parsed_20 = parse(json.dumps(patched))
    # STIX 2.0 -> 2.1
    return convert_to_stix_20_to_21(json.loads(str(parsed_20)))
def idiom_mappings(xml_file_path, stored_json):
    """Test fresh conversion from XML to JSON matches stored JSON samples."""
    print("Checking - " + xml_file_path)
    print("With Master - " + stored_json["id"])
    initialize_options()
    set_option_value("log_level", "CRITICAL")
    set_option_value("validator_args", "--no-cache")
    # Testing requires the "no_policy" policy; force it if anything else
    # is configured.
    if get_option_value("policy") != "no_policy":
        print("'no_policy' is not allowed for testing")
        set_option_value("policy", "no_policy")
    sys.setrecursionlimit(3000)
    fresh_json = json.loads(elevate_file(xml_file_path))
    marking_fields = (u"object_marking_refs", u"granular_markings")
    for expected, actual in zip(iterpath(stored_json), iterpath(fresh_json)):
        expected_path, expected_value = expected
        # Rule #1: No need to verify iterable types. Since we will deal
        # with individual values in the future.
        if isinstance(expected_value, (dict, list)):
            continue
        # Exception to Rule #1: object_marking_refs and granular_markings
        # are not verifiable because they contain identifiers per rule #2.
        if any(segment in marking_fields for segment in expected_path):
            continue
        # Rule #2: Since fresh conversion may create dynamic values.
        # Some fields are omitted for verification. Currently
        # fields with: identifier and timestamp values.
        if expected_path[-1] in IGNORE:
            continue
        yield expected, actual