def test_propclass_defaults_eq_dict():
    defs = Properties({"key": "lock", "horse": "orange"})
    p = Properties({"Foo": "BAR"}, defaults=defs)
    assert p == {"Foo": "BAR"}
    assert {"Foo": "BAR"} == p
    assert p != {"Foo": "bar"}
    assert {"Foo": "bar"} != p
def test_propclass_getitem():
    p = Properties()
    p.load(StringIO(INPUT))
    assert p["key"] == "value"
    assert p["foo"] == "second definition"
    with pytest.raises(KeyError):
        p["missing"]
def test_propclass_store(mocker):
    mocker.patch('time.localtime', return_value=time.localtime(1478550580))
    p = Properties({"key": "value"})
    s = StringIO()
    p.store(s)
    assert s.getvalue() == '#Mon Nov 07 15:29:40 EST 2016\nkey=value\n'
    time.localtime.assert_called_once_with(None)
def test_validate_snapshot_generator(pipeline, sdc_executor):
    generator = 'com.streamsets.datacollector.bundles.content.SnapshotGenerator'

    # Generate at least one snapshot
    snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot
    sdc_executor.stop_pipeline(pipeline)
    assert snapshot is not None

    bundle = sdc_executor.get_bundle(['SnapshotGenerator'])

    # Manifest must contain the generator
    with bundle.open('generators.properties') as zip_file:
        p = Properties()
        p.load(zip_file)

        assert p.get('com.streamsets.datacollector.bundles.content.SnapshotGenerator') is not None

    with bundle.open('{}/{}/{}/output.json'.format(generator, pipeline.id, snapshot.snapshot_name)) as raw:
        bundle_json = json.loads(raw.read().decode())
        bundle_snapshot = sdc_models.Snapshot(pipeline.id, snapshot.snapshot_name, bundle_json)

        assert len(bundle_snapshot) == 1
        assert len(bundle_snapshot[pipeline.origin_stage.instance_name].output) == 10

    assert '{}/{}/{}/info.json'.format(generator, pipeline.id, snapshot.snapshot_name) in bundle.namelist()
def test_propclass_copy_more():
    p = Properties()
    p.load(StringIO(INPUT))
    p2 = p.copy()
    assert p is not p2
    assert isinstance(p2, Properties)
    assert p == p2
    assert dict(p) == dict(p2) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
    }
    p2["foo"] = "third definition"
    del p2["bar"]
    p2["key"] = "value"
    p2["zebra"] = "horse"
    p2["new"] = "old"
    assert p != p2
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
    }
    assert dict(p2) == {
        "foo": "third definition",
        "key": "value",
        "zebra": "horse",
        "new": "old",
    }
def test_validate_redaction(sdc_executor):
    bundle = sdc_executor.get_bundle()

    # Redaction in files
    with bundle.open('com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator/conf/sdc.properties') as raw:
        p = Properties()
        p.load(raw)

        assert p.get('https.keystore.password') == 'REDACTED'
def test_propclass_empty(fixed_timestamp):
    p = Properties()
    assert len(p) == 0
    assert not bool(p)
    assert dict(p) == {}
    s = StringIO()
    p.store(s)
    assert s.getvalue() == '#' + fixed_timestamp + '\n'
def test_propclass_load_eq_from_dict():
    p = Properties()
    p.load(StringIO(INPUT))
    assert p == Properties({
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
    })
def test_propclass_empty_setitem(fixed_timestamp):
    p = Properties()
    p["key"] = "value"
    assert len(p) == 1
    assert bool(p)
    assert dict(p) == {"key": "value"}
    s = StringIO()
    p.store(s)
    assert s.getvalue() == '#' + fixed_timestamp + '\nkey=value\n'
def test_propclass_empty(mocker):
    mocker.patch('time.localtime', return_value=time.localtime(1478550580))
    p = Properties()
    assert len(p) == 0
    assert not bool(p)
    assert dict(p) == {}
    s = StringIO()
    p.store(s)
    assert s.getvalue() == '#Mon Nov 07 15:29:40 EST 2016\n'
    time.localtime.assert_called_once_with(None)
def test_generate_new_bundle(sdc_executor):
    bundle = sdc_executor.get_bundle()

    # The manifest is created last and contains all the generators
    with bundle.open('generators.properties') as zip_file:
        p = Properties()
        p.load(zip_file)

        # Default bundle should have the "default" generators
        assert p.get('com.streamsets.datacollector.bundles.content.PipelineContentGenerator') is not None
        assert p.get('com.streamsets.datacollector.bundles.content.LogContentGenerator') is not None
        assert p.get('com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator') is not None
        # And should not have generators that the user needs to explicitly allow
        assert p.get('com.streamsets.datacollector.bundles.content.SnapshotGenerator') is None
        # Negative case
        assert p.get('universe.milky_way.solar_system.earth.europe.czech_republic.working_government') is None
def test_propclass_load():
    p = Properties()
    p.load(StringIO(INPUT))
    assert len(p) == 4
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
    }
def test_propclass_delitem():
    p = Properties()
    p.load(StringIO(INPUT))
    del p["key"]
    assert len(p) == 3
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "zebra": "apple",
    }
def get_all_stage_libs():
    raw_stagelibs = urllib.request.urlopen(
        "http://nightly.streamsets.com.s3-us-west-2.amazonaws.com/datacollector/latest/tarball/stage-lib-manifest.properties"
    )
    p = Properties()
    p.load(raw_stagelibs)
    return [
        lib
        for lib in [lib.replace('stage-lib.', '') for lib in p if 'stage-lib.' in lib]
        if lib not in EXCLUDE_LIBS
    ]
def test_propclass_empty_setitem(mocker):
    mocker.patch('time.localtime', return_value=time.localtime(1478550580))
    p = Properties()
    p["key"] = "value"
    assert len(p) == 1
    assert bool(p)
    assert dict(p) == {"key": "value"}
    s = StringIO()
    p.store(s)
    assert s.getvalue() == '#Mon Nov 07 15:29:40 EST 2016\nkey=value\n'
    time.localtime.assert_called_once_with(None)
def test_propclass_nonempty_load():
    p = Properties({"key": "lock", "horse": "orange"})
    p.load(StringIO(INPUT))
    assert len(p) == 5
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "horse": "orange",
        "key": "value",
        "zebra": "apple",
    }
def test_propclass_setitem():
    p = Properties()
    p.load(StringIO(INPUT))
    p["key"] = "lock"
    assert len(p) == 4
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "lock",
        "zebra": "apple",
    }
def test_propclass_delitem_missing():
    p = Properties()
    p.load(StringIO(INPUT))
    with pytest.raises(KeyError):
        del p["missing"]
    assert len(p) == 4
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
    }
def test_propclass_additem():
    p = Properties()
    p.load(StringIO(INPUT))
    p["new"] = "old"
    assert len(p) == 5
    assert bool(p)
    assert dict(p) == {
        "foo": "second definition",
        "bar": "only definition",
        "key": "value",
        "zebra": "apple",
        "new": "old",
    }
def test_propclass_eq_repeated_keys():
    p = Properties()
    p.load(StringIO('key = value\nkey: other value\n'))
    p2 = Properties()
    p2.load(StringIO('key: whatever\nkey other value'))
    assert p == p2
    assert dict(p) == dict(p2) == {"key": "other value"}
def test_propclass_eq_different_comments():
    p = Properties()
    p.load(StringIO('#This is a comment.\nkey=value\n'))
    p2 = Properties()
    p2.load(StringIO('#This is also a comment.\nkey=value\n'))
    assert p == p2
    assert dict(p) == dict(p2)
def test_propclass_copy():
    p = Properties({"Foo": "bar"})
    p2 = p.copy()
    assert p is not p2
    assert isinstance(p2, Properties)
    assert p == p2
    assert dict(p) == dict(p2) == {"Foo": "bar"}
    p2["Foo"] = "gnusto"
    assert dict(p) == {"Foo": "bar"}
    assert dict(p2) == {"Foo": "gnusto"}
    assert p != p2
    p2["fOO"] = "quux"
    assert dict(p) == {"Foo": "bar"}
    assert dict(p2) == {"Foo": "gnusto", "fOO": "quux"}
    assert p != p2
def updatePropertyFile(path, stcm, mavenPath=''):
    jp = Properties()
    # Read the existing paths.properties so unchanged keys can be carried over
    with open(osp.join(stcm, 'paths.properties')) as f:
        jp.load(f)
    newProperties = {}
    for k, v in jp.items():
        if k == 'mavenHome':
            if mavenPath != '':
                newProperties[k] = mavenPath
        elif k == 'PathToSetup':
            newProperties[k] = path
        else:
            newProperties[k] = v
    with open(osp.join(stcm, 'paths.properties'), 'w') as f:
        javaproperties.dump(newProperties, f)
def test_propclass_eq_set_nochange():
    p = Properties()
    p.load(StringIO(INPUT))
    p2 = Properties()
    p2.load(StringIO(INPUT))
    assert p == p2
    assert p["key"] == p2["key"] == "value"
    p2["key"] = "value"
    assert p == p2
    assert dict(p) == dict(p2)
def test_validate_log_generator(sdc_executor):
    bundle = sdc_executor.get_bundle(['LogContentGenerator'])

    # Manifest must contain the generator
    with bundle.open('generators.properties') as zip_file:
        p = Properties()
        p.load(zip_file)

        assert p.get('com.streamsets.datacollector.bundles.content.LogContentGenerator') is not None
        assert p.get('com.streamsets.datacollector.bundles.content.PipelineContentGenerator') is None
        assert p.get('com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator') is None

    # Main log
    with bundle.open('com.streamsets.datacollector.bundles.content.LogContentGenerator//sdc.log') as raw:
        log = raw.read().decode()

        assert "Main - Build info" in log
        assert "Main - Runtime info" in log
        assert "Main - Starting" in log

    # We're fine with just validating that gc log is indeed there
    assert 'com.streamsets.datacollector.bundles.content.LogContentGenerator//gc.log' in bundle.namelist()
def test_validate_pipeline_generator(pipeline, sdc_executor):
    bundle = sdc_executor.get_bundle(['PipelineContentGenerator'])

    # Manifest must contain the generator
    with bundle.open('generators.properties') as zip_file:
        p = Properties()
        p.load(zip_file)

        assert p.get('com.streamsets.datacollector.bundles.content.PipelineContentGenerator') is not None
        assert p.get('com.streamsets.datacollector.bundles.content.LogContentGenerator') is None
        assert p.get('com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator') is None

    # The bundle should contain the pipeline, and we should be able to import it into SDC again
    with bundle.open(f'com.streamsets.datacollector.bundles.content.PipelineContentGenerator/'
                     f'{pipeline.id}/pipeline.json') as raw:
        bundle_json = json.loads(raw.read().decode())
        bundle_pipeline = sdc_models.Pipeline(pipeline=bundle_json)

        # We need to "reset" the name, otherwise import will fail
        bundle_pipeline.id = str(uuid4())
        sdc_executor.add_pipeline(bundle_pipeline)

    # History has a known structure, as the pipeline has not run yet
    with bundle.open(f'com.streamsets.datacollector.bundles.content.PipelineContentGenerator/'
                     f'{pipeline.id}/history.json') as raw:
        bundle_json = json.loads(raw.read().decode())
        bundle_history = sdc_models.History(bundle_json)

        assert len(bundle_history) == 1
        entry = bundle_history.latest
        assert entry['user'] == 'admin'

    # Validate existence of some other files
    assert (f'com.streamsets.datacollector.bundles.content.PipelineContentGenerator/{pipeline.id}/info.json'
            in bundle.namelist())
    assert (f'com.streamsets.datacollector.bundles.content.PipelineContentGenerator/{pipeline.id}/offset.json'
            in bundle.namelist())
def _write(self, spectra, spec_file, as_bytes):
    """
    Writes the spectra to the filehandle.

    :param spectra: the list of spectra
    :type spectra: list
    :param spec_file: the file handle to use
    :type spec_file: file
    :param as_bytes: whether to write as bytes or string
    :type as_bytes: bool
    """
    # Create a writing function which handles the as_bytes argument
    if as_bytes:
        def write(string: str):
            spec_file.write(string.encode())
    else:
        write = spec_file.write

    first = True
    for spectrum in spectra:
        if not first:
            write(SEPARATOR + "\n")

        if self._options_parsed.output_sampledata:
            # prefix sample data with '# '
            props = Properties()
            for k in spectrum.sample_data:
                v = spectrum.sample_data[k]
                props[k] = str(v)
                # check bool before int/float, since bool is a subclass of int
                if isinstance(v, bool):
                    props[k + DATATYPE_SUFFIX] = "B"
                elif isinstance(v, (int, float)):
                    props[k + DATATYPE_SUFFIX] = "N"
                elif isinstance(v, str):
                    props[k + DATATYPE_SUFFIX] = "S"
                else:
                    props[k + DATATYPE_SUFFIX] = "U"
            samplestr = dumps(props)
            lines = samplestr.split("\n")
            for i in range(len(lines)):
                lines[i] = COMMENT + lines[i]

            # sample data
            for line in lines:
                write(line + "\n")

        # header
        write(HEADER + "\n")

        # spectral data
        for i in range(len(spectrum)):
            write("%s,%s\n" % (spectrum.waves[i], spectrum.amplitudes[i]))

        first = False
def test_propclass_defaults_setitem_new_override():
    defs = Properties({"key": "lock", "horse": "orange"})
    p = Properties({"key": "value", "apple": "zebra"}, defaults=defs)
    p["horse"] = "pony"
    assert dict(p) == {"key": "value", "apple": "zebra", "horse": "pony"}
    assert dict(defs) == {"key": "lock", "horse": "orange"}
def test_propclass_defaults_setitem_new():
    defs = Properties({"key": "lock", "horse": "orange"})
    p = Properties({"key": "value", "apple": "zebra"}, defaults=defs)
    p["new"] = "old"
    assert dict(p) == {"key": "value", "apple": "zebra", "new": "old"}
    assert dict(defs) == {"key": "lock", "horse": "orange"}
def test_propclass_defaults_setProperty_overridden():
    defs = Properties({"key": "lock", "horse": "orange"})
    p = Properties({"key": "value", "apple": "zebra"}, defaults=defs)
    p.setProperty("key", "hole")
    assert dict(p) == {"key": "hole", "apple": "zebra"}
    assert dict(defs) == {"key": "lock", "horse": "orange"}