def test_prepare_ncep_reanalysis1_pr_wtr(tmpdir):
    output_path = Path(tmpdir)
    expected_metadata_path = output_path / "pr_wtr.eatm.2018.test.ga-md.yaml"
    expected_doc = [
        {
            "crs": "epsg:4236",
            "datetime": "2018-01-01T00:00:00+00:00",
            "geometry": {
                "coordinates": [[
                    [-1.25, 91.25],
                    [-1.25, -91.25],
                    [358.75, -91.25],
                    [358.75, 91.25],
                    [-1.25, 91.25],
                ]],
                "type": "Polygon",
            },
            "grids": {
                "default": {
                    "shape": [73, 144],
                    "transform": [2.5, 0.0, -1.25, 0.0, -2.5, 91.25, 0.0, 0.0, 1.0],
                }
            },
            "id": "fb3afcb0-4301-57c5-8455-35a64e3b0c53",
            "lineage": {},
            "measurements": {
                "water_vapour": {
                    "band": 1,
                    "layer": "pr_wtr",
                    "path": "pr_wtr.eatm.2018.test.nc",
                }
            },
            "product": {
                "href": "https://collections.dea.ga.gov.au/noaa_c_c_prwtreatm_1"
            },
            "properties": {
                "item:providers": [{
                    "name": "NOAA/OAR/ESRL PSD",
                    "roles": ["producer"],
                    "url": "https://www.esrl.noaa.gov/psd/data/gridded/data.ncep.reanalysis.derived.surface.html",
                }],
                "odc:creation_datetime": "2019-05-15T07:29:04.948999+00:00",
                "odc:file_format": "NetCDF",
            },
        },
        {
            "crs": "epsg:4236",
            "datetime": "2018-01-01T06:00:00+00:00",
            "geometry": {
                "coordinates": [[
                    [-1.25, 91.25],
                    [-1.25, -91.25],
                    [358.75, -91.25],
                    [358.75, 91.25],
                    [-1.25, 91.25],
                ]],
                "type": "Polygon",
            },
            "grids": {
                "default": {
                    "shape": [73, 144],
                    "transform": [2.5, 0.0, -1.25, 0.0, -2.5, 91.25, 0.0, 0.0, 1.0],
                }
            },
            "id": "47d52e5b-b6aa-5cb6-888d-06c8e4bfa756",
            "lineage": {},
            "measurements": {
                "water_vapour": {
                    "band": 2,
                    "layer": "pr_wtr",
                    "path": "pr_wtr.eatm.2018.test.nc",
                }
            },
            "product": {
                "href": "https://collections.dea.ga.gov.au/noaa_c_c_prwtreatm_1"
            },
            "properties": {
                "item:providers": [{
                    "name": "NOAA/OAR/ESRL PSD",
                    "roles": ["producer"],
                    "url": "https://www.esrl.noaa.gov/psd/data/gridded/data.ncep.reanalysis.derived.surface.html",
                }],
                "odc:creation_datetime": "2019-05-15T07:34:18.424782+00:00",
                "odc:file_format": "NetCDF",
            },
        },
    ]

    run_prepare_cli(
        noaa_c_c_prwtreatm_1_prepare.main,
        "--output", str(output_path), str(NCEP_PR_WTR_FILE),
    )

    assert expected_metadata_path.exists()
    docs = list(yaml.safe_load_all(expected_metadata_path.open()))

    for idx in range(len(expected_doc)):
        doc_diff = _diff(
            expected_doc[idx],
            docs[idx],
            exclude_paths="root['properties']['odc:creation_datetime']",
        )
        assert doc_diff == {}, pformat(doc_diff)
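# The _diff helper used by the test above is not shown in this snippet. A plausible
# sketch built on deepdiff.DeepDiff (which accepts the same exclude_paths argument
# the test passes through) is given below; it is an assumption, not the project's
# actual implementation.
from deepdiff import DeepDiff


def _diff(expected, got, exclude_paths=None):
    # DeepDiff returns an empty mapping when the two documents are identical,
    # which is what the assertion above relies on.
    return DeepDiff(expected, got, exclude_paths=exclude_paths)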
def _int_model(self, fname):
    self.Models = {}
    with open(fname, 'r') as f:
        for i in yaml.safe_load_all(f):
            self.Models.update(i)
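# A minimal, self-contained sketch of the input _int_model expects: a YAML file
# holding several documents separated by "---", each a mapping that gets merged
# into self.Models. The document contents below are made-up examples, not taken
# from the original project.
import io

import yaml

_example = io.StringIO(
    "model_a: {layers: 3}\n"
    "---\n"
    "model_b: {layers: 5}\n"
)
models = {}
for doc in yaml.safe_load_all(_example):
    models.update(doc)
print(models)  # {'model_a': {'layers': 3}, 'model_b': {'layers': 5}}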
import sys

import ruamel.yaml as yaml

TRAFFIC = sys.argv[1]
file = "../kubernetes-manifests/loadgenerator.yaml"

with open(file, "r") as stream:
    d = list(yaml.safe_load_all(stream))

d[0]['spec']['template']['spec']['containers'][0]['env'][0]['value'] = TRAFFIC

with open(file, "w") as stream:
    yaml.dump_all(d, stream, default_flow_style=False)
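# The hard-coded [0] indexes above assume the env var to change is always the first
# entry of the first container. A slightly more defensive sketch looks the entry up
# by name instead; the variable name "USERS" used in the commented call is an
# assumption, not confirmed by the snippet above.
def set_env_value(doc, var_name, value):
    # Find the matching env entry in the first container and overwrite its value.
    for env in doc['spec']['template']['spec']['containers'][0]['env']:
        if env.get('name') == var_name:
            env['value'] = value
            return
    raise KeyError(f"env var {var_name!r} not found")

# set_env_value(d[0], "USERS", TRAFFIC)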
import os
import re

import yaml

files_list = []
ttp_list = {}

# Walk the entire SIGMA repo for YAML rule files, append each to the file list
for dp, dn, fn in os.walk(source_dir):
    for file in fn:
        if file.endswith(".yml"):
            files_list.append(os.path.join(dp, file))

# Go through the file list and load the YAML one file at a time
for yaml_file in files_list:
    with open(yaml_file, 'r') as stream:
        current_file = yaml_file.rsplit("\\", 1)[1]
        try:
            # Some rule files contain multiple YAML documents, so use safe_load_all
            full_yaml_data = list(yaml.safe_load_all(stream))
            for yaml_data in full_yaml_data:
                # Only interested in YAML that has a tags section
                if 'tags' in yaml_data:
                    # print(yaml_data['title'])
                    # Check tags for ATT&CK techniques
                    for tag in yaml_data['tags']:
                        ttp = re.search(r"attack.t(\d{4})", tag)
                        # If we find a technique ID, extract it and add/increment its entry in the TTP list
                        if ttp:
                            ttp_num = ttp[1]
                            if ttp_num not in ttp_list:
                                ttp_list[ttp_num] = [1, []]
                                ttp_list[ttp_num][1].append(current_file)
                            else:
                                ttp_list[ttp_num][0] += 1
        except yaml.YAMLError:
            # Skip rule files that fail to parse
            continue
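# A quick check of the technique-ID regex used above. The sample tags are
# illustrative only; real SIGMA rules carry technique tags such as "attack.t1059"
# alongside tactic tags like "attack.execution" that the pattern should ignore.
import re

for tag in ["attack.t1059", "attack.execution", "attack.t1003"]:
    ttp = re.search(r"attack.t(\d{4})", tag)
    print(tag, "->", ttp[1] if ttp else None)
# attack.t1059 -> 1059
# attack.execution -> None
# attack.t1003 -> 1003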
def load_all(self):
    if self._data:
        return self._data
    with open(self.yamlf, encoding='utf8') as f:
        s = yaml.safe_load_all(f)
        return [i for i in s]
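# Why the list comprehension sits inside the "with" block: yaml.safe_load_all
# returns a lazy generator, so the documents must be consumed before the file is
# closed. A minimal, file-free demonstration of that laziness:
import yaml

text = "a: 1\n---\nb: 2\n"
gen = yaml.safe_load_all(text)   # nothing is parsed yet
docs = list(gen)                 # parsing happens here
print(docs)                      # [{'a': 1}, {'b': 2}]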