def mark_failed(self, experiment, traceback):
    """Mark *experiment* as failed and log its traceback, one entry per line."""
    failed_path = parse_path(SYSTEM_FAILED_ATTRIBUTE_PATH)
    # One LogStrings value per traceback line; each stamped at send time.
    traceback_values = [
        LogStrings.ValueType(line, step=None, ts=time.time())
        for line in traceback.split("\n")
    ]
    operations = [
        AssignBool(path=failed_path, value=True),
        LogStrings(
            values=traceback_values,
            path=parse_path(MONITORING_TRACEBACK_ATTRIBUTE_PATH),
        ),
    ]
    self._execute_operations(experiment, operations)
def _get_subpath_suggestions(self, path_prefix: str = None, limit: int = 1000) -> List[str]:
    """Return up to *limit* sub-paths under *path_prefix* (structure root when empty/None)."""
    prefix = parse_path(path_prefix or "")
    subpaths = self._structure.iterate_subpaths(prefix)
    # islice caps the (potentially large) iterator lazily before materializing.
    return list(itertools.islice(subpaths, limit))
def send_channels_values(self, experiment, channels_with_values):
    """Convert channel values into series-log operations and send them.

    Args:
        experiment: Experiment the values belong to.
        channels_with_values: Iterable of channel descriptors, each carrying
            `channel_type`, `channel_name`, `channel_namespace` and
            `channel_values` (items with `.value`, `.x`, `.ts`).
    """

    # PEP 8 (E731): use a named function rather than `lambda e: e` bound
    # to a name — same behavior, clearer tracebacks.
    def _identity(value):
        return value

    send_operations = []
    for channel_with_values in channels_with_values:
        channel_value_type = channel_with_values.channel_type
        operation = channel_value_type_to_operation(channel_value_type)

        if channel_value_type == ChannelValueType.IMAGE_VALUE:
            # IMAGE_VALUE requires minor data modification before it's sent
            data_transformer = deprecated_img_to_alpha_image
        else:
            # otherwise use identity function as transformer
            data_transformer = _identity

        ch_values = [
            alpha_operation.LogSeriesValue(
                value=data_transformer(ch_value.value),
                step=ch_value.x,
                ts=ch_value.ts,
            )
            for ch_value in channel_with_values.channel_values
        ]
        send_operations.append(
            operation(
                path=alpha_path_utils.parse_path(
                    self._get_channel_attribute_path(
                        channel_with_values.channel_name,
                        channel_with_values.channel_namespace,
                    )),
                values=ch_values,
            ))
    self._execute_operations(experiment, send_operations)
def add(self, values: Union[str, Iterable[str]], wait: bool = False) -> None:
    """Adds the provided tag or tags to the run's tags.

    Args:
        values (str or collection of str): Tag or tags to be added.
            .. note::
                If you want you can use emojis in your tags eg. "Exploration 🧪"
        wait (bool, optional): If `True` the client will wait to send all tracked metadata to the
            server first. This makes the call synchronous. Defaults to `False`.

    You may also want to check `add docs page`_.

    .. _add docs page:
       https://docs.neptune.ai/api-reference/field-types#.add
    """
    verify_type("values", values, (str, Iterable))
    with self._run.lock():
        tag_set = self._run.get_attribute(self._path)
        if not tag_set:
            # First tag for this path: lazily create the StringSet attribute.
            tag_set = StringSet(self._run, parse_path(self._path))
            self._run.set_attribute(self._path, tag_set)
        tag_set.add(values, wait)
def _create_channel(
    self,
    experiment: Experiment,
    channel_id: str,
    channel_name: str,
    channel_type: ChannelType,
    channel_namespace: ChannelNamespace,
):
    """This function is responsible for creating 'fake' channels in alpha projects.

    Since channels are abandoned in alpha api, we're mocking them using empty logging operation."""
    operation_cls = channel_type_to_operation(channel_type)
    attr_path = alpha_path_utils.parse_path(
        self._get_channel_attribute_path(channel_name, channel_namespace))
    # Logging zero values creates the (empty) attribute on the backend.
    empty_log = operation_cls(path=attr_path, values=[])
    self._execute_operations(
        experiment=experiment,
        operations=[empty_log],
    )
    dto = AlphaChannelWithValueDTO(
        channelId=channel_id,
        channelName=channel_name,
        channelType=channel_type.value,
        x=None,
        y=None,
    )
    return ChannelWithLastValue(dto)
def update_tags(self, experiment, tags_to_add, tags_to_delete):
    """Apply tag additions and removals to *experiment* in one batch."""
    tags_path = alpha_path_utils.parse_path(
        alpha_consts.SYSTEM_TAGS_ATTRIBUTE_PATH)
    add_op = alpha_operation.AddStrings(path=tags_path, values=tags_to_add)
    remove_op = alpha_operation.RemoveStrings(path=tags_path, values=tags_to_delete)
    self._execute_operations(
        experiment=experiment,
        operations=[add_op, remove_op],
    )
def fetch(self) -> dict:
    """Fetch all atom attribute values below this path as a nested dict."""
    attributes = self._backend.fetch_atom_attribute_values(
        self._container_id, self._container_type, self._path)
    struct = ContainerStructure()
    # Strip this namespace's own components so keys are relative to it.
    prefix_len = len(self._path)
    for attr_name, attr_type, attr_value in attributes:
        relative_path = parse_path(attr_name)[prefix_len:]
        struct.set(relative_path, (attr_type, attr_value))
    return self._collect_atom_values(struct.get_structure())
def _remove_attribute(self, experiment, str_path: str):
    """Removes given attribute"""
    delete_op = alpha_operation.DeleteAttribute(
        path=alpha_path_utils.parse_path(str_path),
    )
    self._execute_operations(
        experiment=experiment,
        operations=[delete_op],
    )
def reset_channel(self, experiment, channel_id, channel_name, channel_type):
    """Clear all values of the named user channel on *experiment*."""
    clear_op_cls = channel_type_to_clear_operation(ChannelType(channel_type))
    attr_path = self._get_channel_attribute_path(
        channel_name, ChannelNamespace.USER)
    clear_op = clear_op_cls(path=alpha_path_utils.parse_path(attr_path))
    self._execute_operations(
        experiment=experiment,
        operations=[clear_op],
    )
def accept(self, visitor: "ValueVisitor[Ret]") -> Ret:
    """Dispatch to the visitor's copy handler; reject attributes that cannot be copied."""
    # pylint: disable=protected-access
    source_path = self.source_handler._path
    source_attr = self.source_handler._run.get_attribute(source_path)
    # Guard clause: missing attribute or one without copy support is an error.
    if not (source_attr and source_attr.supports_copy):
        raise Exception(
            f"{type(source_attr).__name__} doesn't support copying")
    return visitor.copy_value(source_type=type(source_attr),
                              source_path=parse_path(source_path))
def sync(self, wait: bool = True) -> None:
    """Rebuild the local attribute structure from the backend's current state.

    When *wait* is True, pending operations are flushed first.
    """
    with self._lock:
        if wait:
            self._op_processor.wait()
        fetched = self._backend.get_attributes(self._id, self.container_type)
        self._structure.clear()
        for attribute in fetched:
            self._define_attribute(parse_path(attribute.path), attribute.type)
def download_file_attribute(self, path: str, destination: Optional[str]):
    """Download the file stored under *path*; raise if absent or not a FILE attribute."""
    for attr in self._attributes:
        if attr.path != path:
            continue
        if attr.type == AttributeType.FILE:
            self._backend.download_file(self._id, ContainerType.RUN,
                                        parse_path(path), destination)
            return
        # Found the attribute but it is not downloadable as a file.
        raise MetadataInconsistency(
            "Cannot download file from attribute of type {}".format(
                attr.type))
    raise ValueError("Could not find {} attribute".format(path))
def set_property(self, experiment, key, value):
    """Save attribute casted to string under `alpha_consts.PROPERTIES_ATTRIBUTE_SPACE` namespace"""
    assign_op = alpha_operation.AssignString(
        path=alpha_path_utils.parse_path(
            f"{alpha_consts.PROPERTIES_ATTRIBUTE_SPACE}{key}"),
        value=str(value),
    )
    self._execute_operations(
        experiment=experiment,
        operations=[assign_op],
    )
def upload_source_code(self, experiment, source_target_pairs):
    """Upload source files (the first element of each pair) as the run's source-code file set."""
    dest_path = alpha_path_utils.parse_path(
        alpha_consts.SOURCE_CODE_FILES_ATTRIBUTE_PATH)
    # Only the local source paths matter here; target paths are ignored.
    file_globs = [source_path for source_path, _ in source_target_pairs]
    upload_op = alpha_operation.UploadFileSet(
        path=dest_path,
        file_globs=file_globs,
        reset=True,
    )
    self._execute_upload_operations_with_400_retry(experiment, upload_op)
def upload_files(self, value: Union[str, Iterable[str]], wait: bool = False) -> None:
    """Upload one path or a collection of paths into this field's FileSet attribute."""
    if is_collection(value):
        verify_collection_type("value", value, str)
    else:
        verify_type("value", value, str)

    with self._run.lock():
        file_set = self._run.get_attribute(self._path)
        if not file_set:
            # Lazily create the FileSet attribute on first upload.
            file_set = FileSet(self._run, parse_path(self._path))
            self._run.set_attribute(self._path, file_set)
        file_set.upload_files(value, wait)
def create_hardware_metric(self, experiment, metric):
    """Register every gauge of *metric* as a configured float series."""
    gauges_count = len(metric.gauges)
    operations = [
        ConfigFloatSeries(
            parse_path(
                self._get_attribute_name_for_metric(metric.resource_type,
                                                    gauge.name(),
                                                    gauges_count)),
            min=metric.min_value,
            max=metric.max_value,
            unit=metric.unit,
        )
        for gauge in metric.gauges
    ]
    self._execute_operations(experiment, operations)
def copy(self, value: ValueCopy, wait: bool = False):
    """Enqueue a CopyAttribute operation that replicates the source handler's attribute here."""
    # pylint: disable=protected-access
    with self._container.lock():
        src = value.source_handler
        src_path = src._path
        src_attr = src._run.get_attribute(src_path)
        copy_op = CopyAttribute(
            self._path,
            src._container_id,
            src._container_type,
            parse_path(src_path),
            src_attr.__class__,
        )
        self._enqueue_operation(copy_op, wait)
def track_files(self, path: str, destination: str = None, wait: bool = False) -> None:
    """Creates an artifact tracking some files.

    You may also want to check `track_files docs page`_.

    .. _track_files docs page:
       https://docs.neptune.ai/api-reference/field-types#.track_files
    """
    with self._run.lock():
        artifact = self._run.get_attribute(self._path)
        if not artifact:
            # First call for this path: create the Artifact attribute.
            artifact = Artifact(self._run, parse_path(self._path))
            self._run.set_attribute(self._path, artifact)
        artifact.track_files(path=path, destination=destination, wait=wait)
def define(
    self,
    path: str,
    value: Union[Value, int, float, str, datetime],
    wait: bool = False,
) -> Attribute:
    """Create a new attribute at *path* from *value*; raise if one already exists.

    Plain Python values are wrapped into their Value counterparts first.
    """
    if not isinstance(value, Value):
        # Ordered predicate/constructor table — order matters (e.g. bools
        # are ints, so is_bool must be tested before is_int; the *_like
        # fallbacks come last).
        conversions = (
            (lambda v: isinstance(v, Handler), ValueCopy),
            (is_bool, Boolean),
            (is_int, Integer),
            (is_float, Float),
            (is_string, String),
            (lambda v: isinstance(v, datetime), Datetime),
            (is_float_like, lambda v: Float(float(v))),
            (is_dict_like, Namespace),
            (is_string_like, lambda v: String(str(v))),
        )
        for matches, wrap in conversions:
            if matches(value):
                value = wrap(value)
                break
        else:
            raise TypeError("Value of unsupported type {}".format(type(value)))

    parsed_path = parse_path(path)
    with self._lock:
        if self._structure.get(parsed_path):
            raise MetadataInconsistency(
                "Attribute or namespace {} is already defined".format(
                    path))
        attr = ValueToAttributeVisitor(self, parsed_path).visit(value)
        self._structure.set(parsed_path, attr)
        attr.process_assignment(value, wait)
        return attr
def upload(self, value, wait: bool = False) -> None:
    """Uploads provided file under specified field path.

    Args:
        value (str or File): Path to the file to be uploaded or `File` value object.
        wait (bool, optional): If `True` the client will wait to send all tracked metadata to the server.
            This makes the call synchronous. Defaults to `False`.

    Examples:
        >>> import neptune.new as neptune
        >>> run = neptune.init()

        >>> # Upload example data
        ... run["dataset/data_sample"].upload("sample_data.csv")

        >>> # Both the content and the extension is stored
        ... # When downloaded the filename is a combination of path and the extension
        ... run["dataset/data_sample"].download() # data_sample.csv

        Explicitly create File value object

        >>> from neptune.new.types import File
        >>> run["dataset/data_sample"].upload(File("sample_data.csv"))

    You may also want to check `upload docs page`_.

    .. _upload docs page:
       https://docs.neptune.ai/api-reference/field-types#.upload
    """
    file_value = FileVal.create_from(value)
    with self._run.lock():
        target = self._run.get_attribute(self._path)
        if not target:
            # First upload for this path: create the File attribute.
            target = File(self._run, parse_path(self._path))
            self._run.set_attribute(self._path, target)
        target.upload(file_value, wait)
def send_hardware_metric_reports(self, experiment, metrics, metric_reports):
    """Translate hardware metric reports into LogFloats operations and send them.

    Args:
        experiment: Experiment the metric values belong to.
        metrics: Known metric definitions (each with `name`, `gauges`,
            `resource_type`).
        metric_reports: Reports carrying `report.metric.name` and
            `report.values` (items with `gauge_name`, `value`, `timestamp`).
    """
    operations = []
    metrics_by_name = {metric.name: metric for metric in metrics}
    for report in metric_reports:
        metric = metrics_by_name.get(report.metric.name)
        if metric is None:
            # Fix: .get() returns None for unknown metric names; previously
            # this crashed with AttributeError on `metric.gauges`. Skip
            # reports we cannot map to a known metric.
            continue
        gauges_count = len(metric.gauges)
        # NOTE(review): itertools.groupby only groups *consecutive* items,
        # so report.values is assumed to arrive ordered by gauge_name —
        # TODO confirm with the producer of these reports.
        for gauge_name, metric_values in groupby(
                report.values, lambda value: value.gauge_name):
            metric_values = list(metric_values)
            path = parse_path(
                self._get_attribute_name_for_metric(
                    metric.resource_type, gauge_name, gauges_count))
            operations.append(
                LogFloats(
                    path,
                    [
                        LogFloats.ValueType(
                            value.value, step=None, ts=value.timestamp)
                        for value in metric_values
                    ],
                ))
    self._execute_operations(experiment, operations)
def _get_init_experiment_operations(
        self, name, description, params, properties, tags, hostname,
        entrypoint) -> List[alpha_operation.Operation]:
    """Returns operations required to initialize newly created experiment.

    Builds assignments for name, description, parameters, properties,
    tags, hostname and entrypoint (the last three only when provided).
    """
    init_operations = []

    # Assign experiment name
    init_operations.append(
        alpha_operation.AssignString(
            path=alpha_path_utils.parse_path(
                alpha_consts.SYSTEM_NAME_ATTRIBUTE_PATH),
            value=name,
        ))
    # Assign experiment description
    init_operations.append(
        alpha_operation.AssignString(
            path=alpha_path_utils.parse_path(
                alpha_consts.SYSTEM_DESCRIPTION_ATTRIBUTE_PATH),
            value=description,
        ))
    # Assign experiment parameters
    for p_name, p_val in params.items():
        parameter_type, string_value = self._get_parameter_with_type(p_val)
        operation_cls = (alpha_operation.AssignFloat
                         if parameter_type == "double" else
                         alpha_operation.AssignString)
        init_operations.append(
            operation_cls(
                path=alpha_path_utils.parse_path(
                    f"{alpha_consts.PARAMETERS_ATTRIBUTE_SPACE}{p_name}"),
                value=string_value,
            ))
    # Assign experiment properties
    for p_key, p_val in properties.items():
        init_operations.append(
            # Consistency fix: qualify AssignString via alpha_operation like
            # every other operation in this method (was a bare AssignString).
            alpha_operation.AssignString(
                path=alpha_path_utils.parse_path(
                    f"{alpha_consts.PROPERTIES_ATTRIBUTE_SPACE}{p_key}"),
                value=str(p_val),
            ))
    # Assign tags
    if tags:
        init_operations.append(
            alpha_operation.AddStrings(
                path=alpha_path_utils.parse_path(
                    alpha_consts.SYSTEM_TAGS_ATTRIBUTE_PATH),
                values=set(tags),
            ))
    # Assign source hostname
    if hostname:
        init_operations.append(
            alpha_operation.AssignString(
                path=alpha_path_utils.parse_path(
                    alpha_consts.SYSTEM_HOSTNAME_ATTRIBUTE_PATH),
                value=hostname,
            ))
    # Assign source entrypoint
    if entrypoint:
        init_operations.append(
            alpha_operation.AssignString(
                path=alpha_path_utils.parse_path(
                    alpha_consts.SOURCE_CODE_ENTRYPOINT_ATTRIBUTE_PATH),
                value=entrypoint,
            ))
    return init_operations
def log(
    self,
    value,
    step: Optional[float] = None,
    timestamp: Optional[float] = None,
    wait: bool = False,
    **kwargs,
) -> None:
    """Logs the provided value or a collection of values.

    Available for following field types (`Field types docs page`_):
        * `FloatSeries`
        * `StringSeries`
        * `FileSeries`

    Args:
        value: Value or collection of values to be added to the field.
        step (float or int, optional, default is None): Index of the log entry being appended.
            Must be strictly increasing.
            Defaults to `None`.
        timestamp(float or int, optional): Time index of the log entry being appended in form of Unix time.
            If `None` current time (`time.time()`) will be used as a timestamp.
            Defaults to `None`.
        wait (bool, optional): If `True` the client will wait to send all tracked metadata to the server.
            This makes the call synchronous. Defaults to `False`.

    .. _Field types docs page:
       https://docs.neptune.ai/api-reference/field-types
    """
    verify_type("step", step, (int, float, type(None)))
    verify_type("timestamp", timestamp, (int, float, type(None)))

    with self._run.lock():
        attr = self._run.get_attribute(self._path)
        if not attr:
            # Deduce the series type from the first value being logged.
            if is_collection(value):
                if not value:
                    raise ValueError(
                        "Cannot deduce value type: `value` cannot be empty"
                    )
                first_value = next(iter(value))
            else:
                first_value = value

            # Ordered checks — exact-type tests precede the *_like
            # fallbacks, mirroring the precedence of the type deduction.
            type_checks = (
                (is_float, FloatSeries),
                (is_string, StringSeries),
                (FileVal.is_convertable, FileSeries),
                (is_float_like, FloatSeries),
                (is_string_like, StringSeries),
            )
            for matches, series_cls in type_checks:
                if matches(first_value):
                    attr = series_cls(self._run, parse_path(self._path))
                    break
            else:
                raise TypeError("Value of unsupported type {}".format(
                    type(first_value)))

            self._run.set_attribute(self._path, attr)
        attr.log(value, step=step, timestamp=timestamp, wait=wait, **kwargs)
def _get_dest_and_ext(target_name):
    """Build the parsed artifact path for *target_name* inside the artifact namespace."""
    qualified_target_name = f"{alpha_consts.ARTIFACT_ATTRIBUTE_SPACE}{target_name}"
    normalized = normalize_file_name(qualified_target_name)
    return alpha_path_utils.parse_path(normalized)
def pop(self, path: str, wait: bool = False) -> None:
    """Remove the attribute or namespace at *path* (delegates to _pop_impl)."""
    verify_type("path", path, str)
    with self._lock:
        parsed = parse_path(path)
        self._pop_impl(parsed, wait)
def set_attribute(self, path: str, attribute: Attribute) -> Optional[Attribute]:
    """Store *attribute* under *path*, returning whatever the structure's set() yields."""
    with self._lock:
        parsed = parse_path(path)
        return self._structure.set(parsed, attribute)
def get_attribute(self, path: str) -> Optional[Attribute]:
    """Look up the attribute stored under *path* (None when undefined)."""
    with self._lock:
        parsed = parse_path(path)
        return self._structure.get(parsed)