def test_2_0_regression(self):
    """Regression check (trafaret 2.0): a Mapping value inside a Dict
    must pass through `check` unchanged."""
    request_schema = t.Dict({
        t.Key('params', optional=True): t.Or(
            t.List(t.Any()),
            t.Mapping(t.AnyString(), t.Any()),
        ),
    })
    payload = {'params': {'aaa': 123}}
    assert request_schema.check(payload) == payload
class TrafaretPoweredAttribute(Attribute):
    """
    froshki.Attribute subclass using trafaret validation system.

    classmethod validate(klass, input_value) is used for handling
    trafaret validation, which is to be non-overridable.
    """

    # Default accepts any value; subclasses override with a stricter trafaret.
    trafaret = trafaret.Any()

    @classmethod
    def validate(klass, input_value):
        # Return (True, converted_value) on success,
        # (False, error_detail) when the trafaret check fails.
        try:
            return True, klass.trafaret.check(input_value)
        except trafaret.DataError as err:
            return False, err.error
def test_any(self):
    """t.Any() must return the checked object unchanged."""
    sentinel = object()
    checked = t.Any().check(sentinel)
    self.assertEqual(checked, sentinel)
def test_repr(self):
    """The repr of t.Any() is the fixed string '<Any>'."""
    representation = repr(t.Any())
    assert representation == '<Any>'
def test_any(self):
    """t.Any() passes arbitrary objects through untouched."""
    probe = object()
    result = t.Any().check(probe)
    assert result == probe
def test_any(self):
    """Any composed with ignore discards the value, yielding None."""
    pipeline = t.Any() >> ignore
    checked = pipeline.check(object())
    self.assertEqual(checked, None)
T.List(T.Dict({ "id": T.String(), "paths": T.List(T.String()), })), "workdir": T.String(), "command": T.String(allow_blank=True), "stack_file": T.String(), "excluded_services": T.List(T.String()), "excluded_volumes": T.List(T.String()), "additional_parameters": T.Any(), T.Key("services_prefix", default="", optional=True): T.String(allow_blank=True), }), "portainer": T.List( T.Dict({ "url": T.String(), T.Key("endpoint_id", optional=True, default=-1): T.Int(), T.Key("username", optional=True, default=""): T.String(allow_blank=True), T.Key("password", optional=True, default=""): T.String(allow_blank=True), "stack_name":
len(model.layers) - 1)) if not isinstance(model.layers[0], InputLayer): warnings.warn( 'First layer of the model is not an input layer. Beware of depth issues.' ) # -------------------------------------------------------- # # Get the intermediate output new_model_output = model.layers[(depth + 1) * -1].output new_model = Model(inputs=model.input, outputs=new_model_output) new_model.layers[-1].outbound_nodes = [] return new_model @t.guard(features=t.Any(), num_pooled_features=t.Int(gte=1)) def _find_pooling_constant(features, num_pooled_features): """ Given a tensor and an integer divisor for the desired downsampled features, this will downsample the tensor to the desired number of features Parameters: ---------- features : Tensor the layer output being downsampled num_pooled_features : int the desired number of features to downsample to Returns: ------- int the integer pooling constant required to correctly splice the layer output for downsampling """
def __init__(self, *keys):
    """Store the key names, build a bracketed display name from them,
    and accept any value via a t.Any() trafaret."""
    self.keys = keys
    # NOTE(review): assumes every entry in *keys is a string — join would
    # raise TypeError otherwise; confirm against callers.
    self.name = '[{}]'.format(', '.join(self.keys))
    self.trafaret = t.Any()
return check def property_names(trafaret): checker = t.List(trafaret) def check(data): return checker(list(data.keys())) return check # simple keys that does not provide $ref headache keywords = ( t.Key('enum', optional=True, trafaret=t.List(t.Any) & (lambda consts: t.Or(*(t.Atom(cnst) for cnst in consts)))), t.Key('const', optional=True, trafaret=t.Any() & then(t.Atom)), t.Key('type', optional=True, trafaret=ensure_list(json_schema_type) & then(Any)), # number validation t.Key('multipleOf', optional=True, trafaret=t.Float(gt=0) & then(multipleOf)), t.Key('maximum', optional=True, trafaret=t.Float() & (lambda maximum: t.Float(lte=maximum))), t.Key('exclusiveMaximum', optional=True, trafaret=t.Float() & (lambda maximum: t.Float(lt=maximum))), t.Key('minimum', optional=True, trafaret=t.Float() & (lambda minimum: t.Float(gte=minimum))), t.Key('exclusiveMinimum', optional=True, trafaret=t.Float() & (lambda minimum: t.Float(gt=minimum))), # string t.Key('maxLength', optional=True, trafaret=t.Int(gte=0) & (lambda length: t.String(max_length=length))), t.Key('minLength', optional=True, trafaret=t.Int(gte=0) & (lambda length: t.String(min_length=length))), t.Key('pattern', optional=True, trafaret=Pattern() & (lambda pattern: t.Regexp(pattern))), # array
class ImageFilter:
    '''
    HSV-range image filter.

    Methods
        __init__(self, image=None, upper_HSV=None, lower_HSV=None)
            Initialize the filter. Required inputs are a color image and an
            initial upper and lower range for the filter.
        find_shapes(self)
            Method to adjust the filter criteria until a masked image can
            be returned.
    '''

    # FIX: defaults changed from mutable [] literals to None sentinels
    # (shared-mutable-default pitfall). t.Null() is added to the guard so an
    # omitted argument still passes validation exactly as the old [] did;
    # the observable failure for omitted bounds (IndexError on indexing an
    # empty list) is unchanged.
    @t.guard(image=t.Any(),
             upper_HSV=t.Or(t.List(t.Int(gte=0)), t.Null()),
             lower_HSV=t.Or(t.List(t.Int(gte=0)), t.Null()))
    def __init__(self, image=None, upper_HSV=None, lower_HSV=None):
        '''
        Initializer.

        Parameters:
        ----------
        image : list
            A list representing a color image of shape
            (image_size, image_size, 3)
        upper_HSV : list
            A list of the upper limits of the initial HSV filter
        lower_HSV : list
            A list of the lower limits of the initial HSV filter

        Returns:
        ----------
        None. Initializes and saves the filter object attributes.

        Raises:
        ----------
        ValueError
            If an upper bound does not exceed its lower bound, or a bound
            lies outside OpenCV's H/S/V ranges.
        '''
        # Restore the historical empty-list defaults for the None sentinels.
        image = [] if image is None else image
        upper_HSV = [] if upper_HSV is None else upper_HSV
        lower_HSV = [] if lower_HSV is None else lower_HSV

        ### ERROR CHECKING ### (performed before any state is stored)
        # Bounds are expected as three-element [H, S, V] lists.
        # Verify upper bounds are greater than lower bounds
        if (upper_HSV[0] <= lower_HSV[0]
                or upper_HSV[1] <= lower_HSV[1]
                or upper_HSV[2] <= lower_HSV[2]):
            # FIX: typo 'muste be' -> 'must be' in the error message.
            raise ValueError('Each value in HSV Upper range must be '
                             'greater than values in Lower HSV')
        # OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
        if upper_HSV[0] > 180 or lower_HSV[0] > 180:
            raise ValueError('Hue must be less than 180')
        if upper_HSV[1] > 255 or lower_HSV[1] > 255:
            raise ValueError('Saturation must be less than 255')
        if upper_HSV[2] > 255 or lower_HSV[2] > 255:
            raise ValueError('Value must be less than 255')

        # Build Filter
        self.image = image
        self.upper_HSV = np.array(upper_HSV)
        self.lower_HSV = np.array(lower_HSV)
        self.contour_threshold_upper = 10   # Maximum number of contours
        self.contour_threshold_lower = 1    # Minimum number of contours
        self.min_shape_size = 150           # Minimum contour size to be counted
        self.image_size = 299               # Image size
        self.shape_stepsize = 200           # Step size to reduce minimum contour
                                            # size, if the filter can't find minimum

        # Option to blur and sharpen the image before masking
        self.sharpen = True
        # cv2 blurring method, as a string. Options are:
        # blur, GaussianBlur, medianBlur, bilateralFilter or none
        self.filter_type = 'bilateralFilter'

    def find_shapes(self):
        '''
        Sharpens the image (if self.sharpen is true), resizes the image to
        the specified size and applies the tune_sat function to see if
        enough contours of a minimum size can be found by adjusting the
        filter boundaries (see filter_functions). If not, the minimum
        contour size is stepped down, and the tune_sat function tries
        again. Once enough contours are found, the masked image is
        converted back to color.

        Returns the masked image in color, the list of contours found by
        cv2.findContours, and the upper and lower filter boundaries found.
        '''
        contour_counter = 0
        image = self.image
        upper_HSV = self.upper_HSV
        lower_HSV = self.lower_HSV
        contour_threshold_upper = self.contour_threshold_upper
        contour_threshold_lower = self.contour_threshold_lower
        min_shape_size = self.min_shape_size
        image_size = self.image_size
        shape_stepsize = self.shape_stepsize

        image = cv2.resize(image, (image_size, image_size))
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        # FIX: idiomatic truthiness test instead of '== True'.
        if self.sharpen:
            image = sharpen_image(image, method=self.filter_type)

        # Remember the initial saturation bounds so each retry starts fresh.
        init_sat_upper = upper_HSV[1]
        init_sat_lower = lower_HSV[1]

        while (contour_counter <= contour_threshold_lower
               or contour_counter >= contour_threshold_upper):
            processed_image, contours, upper_HSV, lower_HSV = tune_sat(
                image, upper_HSV, lower_HSV, min_shape_size,
                contour_threshold_upper, contour_threshold_lower)
            contour_counter = count_contours(contours, min_shape_size)
            # Relax the minimum contour size for the next attempt.
            min_shape_size = min_shape_size - shape_stepsize
            if contour_threshold_lower <= contour_counter <= contour_threshold_upper:
                break
            # Reset saturation bounds before retrying with a smaller minimum.
            upper_HSV[1] = init_sat_upper
            lower_HSV[1] = init_sat_lower
            if min_shape_size < 50:
                print('Contours Found are not in range')
                break

        image = cv2.cvtColor(processed_image, cv2.COLOR_HSV2BGR)
        # tune_sat returns numpy arrays for the bounds; hand back plain lists.
        upper_HSV = np.ndarray.tolist(upper_HSV)
        lower_HSV = np.ndarray.tolist(lower_HSV)
        return (image, contours, upper_HSV, lower_HSV)