def GradientDatastream():
    """Create the gradient (training) datastream.

    Builds one substream per class in ``settings.CLASS_NAMES`` (merged
    via ``Datastream.merge`` for class balancing) and augments every
    example with a freshly created augmenter.
    """
    dataset = datastream.datasets()["gradient"]
    augmenter = datastream.augmenter()
    return Datastream.merge([
        Datastream(dataset.subset(
            # Bind class_name as a default argument: if subset() stores the
            # predicate for lazy evaluation, a plain closure would see only
            # the final loop value (late-binding closure pitfall).
            lambda df, class_name=class_name: df["class_name"] == class_name
        ))
        for class_name in settings.CLASS_NAMES
    ]).map(lambda example: example.augment(augmenter))
def GradientDatastream():
    """Create the gradient (training) datastream.

    Takes the 'gradient' split's dataset from ``evaluate_datastreams``,
    merges one substream per class in ``settings.CLASS_NAMES`` (class
    balancing), and applies augmentation to each example.
    """
    dataset = evaluate_datastreams()['gradient'].dataset
    augmenter_ = augmenter()
    return (
        Datastream.merge([
            Datastream(dataset.subset(
                # Bind class_name as a default argument to avoid the
                # late-binding closure pitfall in case subset() keeps the
                # predicate for lazy evaluation.
                lambda df, class_name=class_name: (
                    df['class_name'] == class_name
                )
            ))
            for class_name in settings.CLASS_NAMES
        ])
        .map(lambda example: example.augmented(augmenter_))
    )
def evaluate_datastreams():
    """Return evaluation datastreams keyed by split name.

    'gradient' wraps the problem's 'train' dataset, 'compare' wraps its
    'compare' dataset.
    """
    evaluate_datasets = problem.evaluate_datasets()
    named_splits = dict(
        gradient=evaluate_datasets['train'],
        compare=evaluate_datasets['compare'],
    )
    return {
        split_name: Datastream(dataset)
        for split_name, dataset in named_splits.items()
    }
def evaluate_datastreams():
    """Return evaluation datastreams keyed by split name.

    The 'train' dataset is split 80/20 into 'gradient' /
    'early_stopping' (stratified on class_name; the split is persisted
    to a JSON file so it stays stable across runs). The 'compare'
    dataset is exposed as-is. Each split is wrapped in a Datastream.
    """
    evaluate_datasets = problem.evaluate_datasets()
    evaluate_datasets['train'] = evaluate_datasets['train'].split(
        key_column='index',
        proportions=dict(gradient=0.8, early_stopping=0.2),
        stratify_column='class_name',
        filepath='{{cookiecutter.package_name}}/splits/early_stopping.json',
    )
    named_splits = dict(
        gradient=evaluate_datasets['train']['gradient'],
        early_stopping=evaluate_datasets['train']['early_stopping'],
        compare=evaluate_datasets['compare'],
    )
    return {
        split_name: Datastream(dataset)
        for split_name, dataset in named_splits.items()
    }
def evaluate_datastreams():
    """Wrap every available dataset in a Datastream, keyed by split name."""
    streams = {}
    for split_name, dataset in datastream.datasets().items():
        streams[split_name] = Datastream(dataset)
    return streams
class ResourceApi:
    """Container of API resource singletons, one per exposed resource."""

    datastream = Datastream()
    sensor = Sensor()
    reading = Reading()
    # Fixed typo: attribute was named `statio`. The old name is kept as an
    # alias so any existing callers keep working.
    station = Station()
    statio = station
# Load the backend as specified in configuration.
if getattr(settings, 'DATASTREAM_BACKEND', None) is not None:
    backend = settings.DATASTREAM_BACKEND
    # A string backend is a dotted path "package.module.ClassName" that is
    # resolved via importlib; anything else is assumed to already be a
    # backend instance. (Was `basestring`, which is a NameError on
    # Python 3; `except X, e` below was a Python-3 syntax error.)
    if isinstance(backend, str):
        i = backend.rfind('.')
        module, attr = backend[:i], backend[i + 1:]
        try:
            mod = importlib.import_module(module)
        except ImportError as e:
            raise exceptions.ImproperlyConfigured(
                'Error importing datastream backend %s: "%s"' % (module, e))
        try:
            cls = getattr(mod, attr)
        except AttributeError:
            raise exceptions.ImproperlyConfigured(
                'Module "%s" does not define a "%s" datastream backend'
                % (module, attr))
        backend = cls(**getattr(settings, 'DATASTREAM_BACKEND_SETTINGS', {}))

    def callback(stream_id, granularity, datapoint):
        # Broadcast each newly appended datapoint to signal listeners.
        signals.new_datapoint.send(
            sender=datastream, stream_id=stream_id,
            granularity=granularity, datapoint=datapoint)

    datastream = Datastream(backend, callback)
def init_datastream(datastream_backend, datastream_backend_settings):
    """Instantiate a Datastream from a backend specification.

    :param datastream_backend: either a dotted path string
        "package.module.ClassName" (resolved via importlib and
        instantiated with ``datastream_backend_settings``) or an
        already-constructed backend object.
    :param datastream_backend_settings: keyword arguments for the
        backend class when a dotted path is given.
    :raises exceptions.ImproperlyConfigured: if the module cannot be
        imported or does not define the named backend class.
    :return: a ``Datastream`` wrapping the backend.
    """
    backend = datastream_backend
    # Was `isinstance(backend, basestring)` / `except ImportError, exception`
    # — both Python-2-only; rewritten in Python-3-compatible form.
    if isinstance(backend, str):
        i = backend.rfind('.')
        module, attr = backend[:i], backend[i + 1:]
        try:
            mod = importlib.import_module(module)
        except ImportError as exception:
            raise exceptions.ImproperlyConfigured(
                "Error importing datastream backend %s: %s"
                % (module, exception))
        try:
            cls = getattr(mod, attr)
        except AttributeError:
            raise exceptions.ImproperlyConfigured(
                "Module '%s' does not define a '%s' datastream backend"
                % (module, attr))
        backend = cls(**datastream_backend_settings)
    return Datastream(backend)


# Load the backend as specified in configuration.
if getattr(settings, 'DATASTREAM_BACKEND', None) is not None:
    datastream = init_datastream(
        settings.DATASTREAM_BACKEND,
        getattr(settings, 'DATASTREAM_BACKEND_SETTINGS', {}))