def initialize_regulators(config, kafka_feedback):
    """Build one PID control observable per regulator declared in config.

    Args:
        config: iterable of regulator declarations; each item is a dict with
            a 'feedback' key (kafka feedback topic to listen on) and a
            'control' key (name under which the control stream is exposed).
        kafka_feedback: observable of (topic, value) tuples.

    Returns:
        A dict mapping each regulator's 'control' name to an observable of
        control values, clamped to the [0.0, 0.01] range.
    """
    regulators = {}
    for regulator in config:
        # Bind the topic as a lambda default argument: the filter lambda is
        # evaluated lazily, long after this loop has completed. A plain
        # closure over `regulator` would be late-bound and every pipeline
        # would filter on the LAST regulator's feedback topic.
        control = kafka_feedback.pipe(
            trace_observable("regulator feedback"),
            ops.filter(lambda i, topic=regulator['feedback']: i[0] == topic),
            ops.map(lambda i: i[1] / 1000),
            # Setpoint held at 1.0 forever; negative gains invert the error.
            pid(rx.concat(rx.just(1.0), rx.never()), -0.001, -0.001, 0.0),
            # Clamp the command to [0.0, 0.01].
            ops.map(lambda i: max(min(i, 0.01), 0.0)),
            trace_observable("regulator"),
        )
        regulators[regulator['control']] = control
    return regulators
def test_no_subscribe(self):
    """With trace_subscribe=False only on_next events are logged."""
    subject = Subject()
    fixed_date = datetime.datetime(year=2018, month=8, day=3)
    traced = subject.pipe(
        trace_observable(
            prefix='foo',
            trace_subscribe=False,
            date=fixed_date))
    traced.subscribe()
    subject.on_next('bar')
    expected = '2018-08-03 00:00:00:foo - on_next: bar'
    self.assertEqual(expected, self.out.getvalue().strip())
def makinage(aio_scheduler, sources):
    # Top-level wiring: reads configuration from argv/file/http, connects the
    # kafka source to the configured operators, and returns the sink requests.
    def on_error(e):
        # Re-raise so stream errors are not silently swallowed.
        raise e

    config, read_request, http_request = read_config_from_args(
        sources.argv.argv,
        sources.file.response,
        sources.http.response,
        scheduler=aio_scheduler
    )

    # First configuration item, followed by rx.never() so the stream stays
    # open instead of completing after the single config value.
    first_config = rx.concat(config.pipe(ops.take(1),), rx.never())

    # replay + ref_count: share a single kafka subscription between all
    # downstream operators, replaying items to late subscribers.
    kafka_source = sources.kafka.response.pipe(
        trace_observable("kafka source1"),
        ops.replay(),
        ops.ref_count(),
        trace_observable("kafka source2"),
    )
    # Eager subscription — presumably to start consuming kafka immediately
    # and surface source errors via on_error; NOTE(review): subscription
    # order relative to kafka_request below looks intentional, confirm.
    kafka_source.subscribe(on_error=on_error)

    kafka_request = first_config.pipe(
        ops.flat_map(lambda i: create_operators(
            i, config,
            kafka_source,
            sources.kafka.feedback.pipe(ops.share()),
        )),
        ops.subscribe_on(aio_scheduler),
        trace_observable("makinage"),
    )

    '''
    config.pipe(ops.subscribe_on(aio_scheduler)).subscribe(
        on_next=print,
        on_error=print,
    )
    '''

    return MakiNageSink(
        file=file.Sink(request=read_request),
        http=http.Sink(request=http_request),
        kafka=kafka.Sink(request=kafka_request),
    )
def serve(config, model, data):
    '''Serves a model

    Loads each model received on the model observable and runs inference on
    every item received on the data observable.

    The configuration observable must contain a serve section with the
    following fields:

    * input_field: The input field name used to run inference.
    * output_field: The output field name where inference result is set.

    An optional "prepare" field may specify a data transformation applied
    before feeding the model; when absent, the input data is converted to a
    numpy array.

    Args:
        config: configuration observable.
        model: observable of serialized mlflow models.
        data: observable of items to run inference on.

    Returns:
        A single-element tuple holding an observable of predictions. Each
        prediction is a copy of the original data item with one additional
        field, named after output_field.
    '''
    # Latest model combined with latest config yields a predict callable.
    predictor = model.pipe(
        trace_observable(prefix="model", trace_next_payload=False),
        ops.map(load_mlflow_model),
        ops.combine_latest(config),
        ops.starmap(create_model_predict),
    )

    # Per-config transformation functions applied ahead of the model.
    prepare_fns = config.pipe(
        trace_observable(prefix="prepare", trace_next_payload=False),
        ops.map(create_transform_functions))

    # Run inference on each data item, dropping items with no prediction.
    predictions = data.pipe(
        rs.ops.with_latest_from(prepare_fns, predictor),
        ops.starmap(infer),
        ops.filter(lambda i: i is not None),
    )

    return (predictions,)
def test_base_on_completed(self):
    """on_completed is traced together with subscribe and dispose events."""
    subject = Subject()
    fixed_date = datetime.datetime(year=2018, month=8, day=3)
    subject.pipe(
        trace_observable(prefix='foo', date=fixed_date)).subscribe()
    subject.on_completed()
    expected = (
        '2018-08-03 00:00:00:foo - on_subscribe\n'
        '2018-08-03 00:00:00:foo - on_completed\n'
        '2018-08-03 00:00:00:foo - dispose'
    )
    self.assertEqual(expected, self.out.getvalue().strip())
def multiply(config, values):
    """Multiply each incoming value by the configured factor.

    Args:
        config: configuration observable; items carry the factor under
            ['config']['multiply'].
        values: observable of string-encoded integers.

    Returns:
        A single-element tuple holding an observable of string-encoded
        products.
    """
    def apply_factor(value, conf):
        # Latest configuration wins: each value uses the most recent factor.
        return int(value) * conf['config']['multiply']

    multiplied = values.pipe(
        trace_observable("multiply 1"),
        rs.ops.with_latest_from(config),
        ops.starmap(apply_factor),
        trace_observable("multiplied"),
        ops.map(str))
    return (multiplied,)