Example #1
def resnet101v2(model_config,
                input_shape,
                metrics,
                n_classes=2,
                output_bias=None,
                gpus=1):
    '''
    Defines a model based on a pretrained ResNet101V2 for multiclass X-ray classification.
    Note that batch size per GPU should be >= 12 to prevent NaN in batch normalization.
    :param model_config: A dictionary of parameters associated with the model architecture
    :param input_shape: The shape of the model input
    :param metrics: Metrics to track model's performance
    :param n_classes: Number of classes in the data
    :param output_bias: Bias initializer of the output layer
    :param gpus: Number of GPUs to distribute the model across
    :return: a Keras Model object with the architecture defined in this method
    '''

    # Set hyperparameters
    nodes_dense0 = model_config['NODES_DENSE0']
    lr = model_config['LR']
    dropout = model_config['DROPOUT']
    l2_lambda = model_config['L2_LAMBDA']
    if model_config['OPTIMIZER'] == 'adam':
        optimizer = Adam(learning_rate=lr)
    elif model_config['OPTIMIZER'] == 'sgd':
        optimizer = SGD(learning_rate=lr)
    else:
        optimizer = Adam(learning_rate=lr)  # For now, Adam is default option

    # Set output bias
    if output_bias is not None:
        output_bias = Constant(output_bias)
    print("MODEL CONFIG: ", model_config)

    # Start with pretrained ResNet101V2
    X_input = Input(input_shape, name='input_img')
    base_model = ResNet101V2(include_top=False,
                             weights='imagenet',
                             input_shape=input_shape,
                             input_tensor=X_input)
    X = base_model.output

    # Add custom top
    X = GlobalAveragePooling2D()(X)
    X = Dropout(dropout)(X)
    X = Dense(nodes_dense0,
              kernel_initializer='he_uniform',
              activity_regularizer=l2(l2_lambda))(X)
    X = LeakyReLU()(X)
    X = Dense(n_classes, bias_initializer=output_bias)(X)
    Y = Activation('softmax', dtype='float32', name='output')(X)

    # Set model loss function, optimizer, metrics.
    model = Model(inputs=X_input, outputs=Y)
    model.summary()
    if gpus >= 2:
        model = multi_gpu_model(model, gpus=gpus)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=metrics)
    return model
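A minimal usage sketch (not part of the original example): the configuration values, metric list, and input shape below are illustrative assumptions, and the resnet101v2() function above is assumed to be in scope together with the Keras imports it relies on.

# Hypothetical configuration; key names mirror the lookups in resnet101v2() above.
model_config = {
    'NODES_DENSE0': 256,
    'LR': 1e-4,
    'DROPOUT': 0.4,
    'L2_LAMBDA': 1e-4,
    'OPTIMIZER': 'adam',
}
model = resnet101v2(model_config,
                    input_shape=(224, 224, 3),
                    metrics=['accuracy'],
                    n_classes=2)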
Example #2
def resnet101v2(input_size):
    input_tensor = Input(shape=(input_size, input_size, 3))
    base_model = ResNet101V2(include_top=False,
                             weights='imagenet',
                             input_shape=(input_size, input_size, 3))

    model = create_model(base_model, input_tensor)

    return model
Example #3
def get_model_resnet(img_shape, img_input, weights, resnet_depth):
    if resnet_depth == 50:
        return ResNet50(include_top=False, weights=weights, input_tensor=img_input, input_shape=img_shape,
                           pooling=None)
    elif resnet_depth == 101:
        return ResNet101V2(include_top=False, weights=weights, input_tensor=img_input, input_shape=img_shape,
                              pooling=None)
    else:
        return ResNet152V2(include_top=False, weights=weights, input_tensor=img_input, input_shape=img_shape,
                              pooling=None)
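A hedged usage sketch (an assumption, not from the source) showing the relationship between img_shape and img_input that get_model_resnet() expects; the shape and depth are illustrative.

from tensorflow.keras.layers import Input

img_shape = (224, 224, 3)           # assumed input shape
img_input = Input(shape=img_shape)
backbone = get_model_resnet(img_shape, img_input, weights='imagenet', resnet_depth=101)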
Example #4
 def __init__(self, data_shape=(224, 224, 3), resnet_version=1, resnet_layer_number=50, num_classes=1000):
   super(ResNet, self).__init__()
   
   weights = None
   if num_classes == 1000 and data_shape == (224, 224, 3):
     weights = 'imagenet'
     
   self.resnet_version = resnet_version
   
   self.data_augmentation = keras.Sequential(
     [
       layers.experimental.preprocessing.RandomFlip(
         "horizontal", 
         input_shape=data_shape),
       layers.experimental.preprocessing.RandomRotation(0.1),
       layers.experimental.preprocessing.RandomZoom(0.1),
     ]
   )
   
   self.rescaling = layers.experimental.preprocessing.Rescaling(1./255)
   
    def preprocess_input(x, data_format=None):
      from tensorflow.keras.applications import imagenet_utils
      return imagenet_utils.preprocess_input(
          x, data_format=data_format, mode='tf')
      #return x
     
   self.preprocess_input = preprocess_input
   
   if resnet_layer_number == 18:
     if resnet_version == 1:
       self.resnet = ResNet18(category_num=num_classes)
     else:
       self.resnet = ResNet18V2(category_num=num_classes)
   elif resnet_layer_number == 50:
     if resnet_version == 1:
       self.resnet = ResNet50(weights=weights, input_shape=data_shape, classes=num_classes)
     else:
       self.resnet = ResNet50V2(weights=weights, input_shape=data_shape, classes=num_classes)
   elif resnet_layer_number == 101:
     if resnet_version == 1:
       self.resnet = ResNet101(weights=weights, input_shape=data_shape, classes=num_classes)
     else:
       self.resnet = ResNet101V2(weights=weights, input_shape=data_shape, classes=num_classes)
   elif resnet_layer_number == 152:
     if resnet_version == 1:
       self.resnet = ResNet152(weights=weights, input_shape=data_shape, classes=num_classes)
     else:
       self.resnet = ResNet152V2(weights=weights, input_shape=data_shape, classes=num_classes)
     
   self.build((None,) + data_shape)
Example #5
def create_image_resnet101v2_model(NUM_CLASS,
                                   isPreTrained=False,
                                   pathToResNet101V2ModelWeights=None):
    resnet101v2 = ResNet101V2(weights='imagenet', include_top=False)
    last_layer = resnet101v2.output
    x = GlobalAveragePooling2D()(last_layer)
    x = Dense(768, activation='relu', name='img_dense_768')(x)
    out = Dense(NUM_CLASS, activation='softmax', name='img_output_layer')(x)
    resnet101v2 = Model(inputs=resnet101v2.input,
                        outputs=out,
                        name="ResNet101V2")

    if not isPreTrained:
        return resnet101v2
    else:
        resnet101v2.load_weights(pathToResNet101V2ModelWeights)
        return resnet101v2, 2
Example #6
def build_resNet101V2(NUM_CLASS):
    ResNet101v2 = ResNet101V2(weights='imagenet', include_top=False)
    #model.summary()
    last_layer = ResNet101v2.output
    x = GlobalAveragePooling2D()(last_layer)
    x = Dense(2048)(x)
    x = Dropout(0.5)(x)
    x = Dense(1024)(x)
    x = Dense(512)(x)
    x = Dropout(0.5)(x)
    # a softmax output layer for NUM_CLASS classes
    out = Dense(NUM_CLASS, activation='softmax', name='output_layer')(x)
    ResNet101v2 = Model(inputs=ResNet101v2.input, outputs=out)
    plot_model(ResNet101v2,
               to_file='multiple_inputs.png',
               show_shapes=True,
               dpi=600,
               expand_nested=False)
    return ResNet101v2, 7
Example #7
def createTextResNet101v2Maximum(max_seq_len, bert_ckpt_file, bert_config_file,
                                 NUM_CLASS):
    with GFile(bert_config_file, "r") as reader:
        bc = StockBertConfig.from_json_string(reader.read())
        bert_params = map_stock_config_to_params(bc)
        bert_params.adapter_size = None
        bert_layer = BertModelLayer.from_params(bert_params, name="bert")

    bert_in = Input(shape=(max_seq_len, ),
                    dtype='int32',
                    name="input_ids_bert")
    bert_inter = bert_layer(bert_in)
    cls_out = Lambda(lambda seq: seq[:, 0, :])(bert_inter)
    cls_out = Dropout(0.5)(cls_out)
    bert_out = Dense(units=768, activation="tanh")(cls_out)  # 768 before
    load_stock_weights(bert_layer, bert_ckpt_file)

    # image models:
    ResNet101v2 = ResNet101V2(weights='imagenet', include_top=False)
    res_out = ResNet101v2.output
    res_out = GlobalAveragePooling2D()(res_out)
    res_out = Dropout(0.5)(res_out)
    res_out = Dense(2048)(res_out)
    res_out = Dropout(0.5)(res_out)
    res_out = Dense(768)(res_out)
    merge = Maximum()([res_out, bert_out])
    # remaining layers
    x = Dense(2048)(merge)
    x = Dropout(0.5)(x)
    x = Dense(1024)(x)
    x = Dropout(0.5)(x)
    x = Dense(512)(x)
    x = Dropout(0.5)(x)
    output = Dense(NUM_CLASS, activation='softmax', name='output_layer')(x)
    model = Model(inputs=[ResNet101v2.input, bert_in], outputs=output)
    plot_model(model,
               to_file='multiple_inputs_text.png',
               show_shapes=True,
               dpi=600,
               expand_nested=False)

    return model, 14
Example #8
    def model_Initializer(self):

        from tensorflow.keras.layers import Dense, Flatten
        from tensorflow.keras.models import Model
        import tensorflow as tf

        #Resources
        print("Num GPUs Available: ",
              len(tf.config.experimental.list_physical_devices('GPU')))
        print("Using Tensorflow : ", tf.__version__)

        # initializing the network model and excluding the last layer of network
        if self.MODEL == 'VGG16':
            from tensorflow.keras.applications.vgg16 import VGG16
            self.model = VGG16(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'VGG19':
            from tensorflow.keras.applications.vgg19 import VGG19
            self.model = VGG19(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'Xception':
            from tensorflow.keras.applications.xception import Xception
            self.model = Xception(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'ResNet50V2':
            from tensorflow.keras.applications.resnet_v2 import ResNet50V2
            self.model = ResNet50V2(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'ResNet101V2':
            from tensorflow.keras.applications.resnet_v2 import ResNet101V2
            self.model = ResNet101V2(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'ResNet152V2':
            from tensorflow.keras.applications.resnet_v2 import ResNet152V2
            self.model = ResNet152V2(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'InceptionV3':
            from tensorflow.keras.applications.inception_v3 import InceptionV3
            self.model = InceptionV3(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'InceptionResNetV2':
            from tensorflow.keras.applications.inception_resnet_v2 import InceptionResNetV2
            self.model = InceptionResNetV2(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'MobileNetV2':
            from tensorflow.keras.applications.mobilenet_v2 import MobileNetV2
            self.model = MobileNetV2(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'DenseNet121':
            from tensorflow.keras.applications.densenet import DenseNet121
            self.model = DenseNet121(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'DenseNet169':
            from tensorflow.keras.applications.densenet import DenseNet169
            self.model = DenseNet169(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        if self.MODEL == 'DenseNet201':
            from tensorflow.keras.applications.densenet import DenseNet201
            self.model = DenseNet201(
                input_shape=self.IMAGE_SIZE + [3],
                weights='imagenet',  #using pretrained imagenet weights
                include_top=False)  #excluding the last layer of network

        # Freezing the layers of the network
        for layer in self.model.layers:
            layer.trainable = False

        # Flattening the last layer
        self.x = Flatten()(self.model.output)

        # Create a dense layer for the output
        self.outlayers = Dense(self.count_output_classes,
                               activation='softmax')(self.x)

        # Bind the pretrained layers to the custom output layer
        self.model = Model(inputs=self.model.input, outputs=self.outlayers)

        #Compile the Model
        self.model.compile(loss='categorical_crossentropy',
                           optimizer='adam',
                           metrics=['accuracy'])
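An illustrative context for model_Initializer() (an assumption, not from the source): a small class sketch showing the attributes the method reads, namely MODEL, IMAGE_SIZE (a list, so that IMAGE_SIZE + [3] forms the input shape), and count_output_classes.

class TransferLearner:
    def __init__(self, model_name='ResNet101V2', image_size=(224, 224), n_classes=5):
        self.MODEL = model_name                  # one of the architecture names checked above
        self.IMAGE_SIZE = list(image_size)       # a list, because IMAGE_SIZE + [3] builds the input shape
        self.count_output_classes = n_classes    # size of the softmax output layer

    # model_Initializer() from the example above would be defined here.

learner = TransferLearner()
# learner.model_Initializer()  # would build and compile self.model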
Example #9
 def download_for_url(self, path: str, **kwargs):
     """
     Download the file at the given URL
     :param path:  the path to download
     :param kwargs:  various kwargs for customizing the underlying behavior of
     the model download and setup
     :return: the absolute path to the model
     """
     path_split = path.split('/')
     type = path_split[0]
     weights_file = path_split[1]
     include_top = 'no_top' in weights_file
     if type == 'vgg19':
         ret = VGG19(include_top=include_top, **kwargs)
     elif type == 'vgg16':
         ret = VGG16(include_top=include_top, **kwargs)
     elif type == 'resnet50':
         ret = ResNet50(include_top=include_top, **kwargs)
     elif type == 'resnet101':
         ret = ResNet101(include_top=include_top, **kwargs)
     elif type == 'resnet152':
         ret = ResNet152(include_top=include_top, **kwargs)
     elif type == 'resnet50v2':
         ret = ResNet50V2(include_top=include_top, **kwargs)
     elif type == 'resnet101v2':
         ret = ResNet101V2(include_top=include_top, **kwargs)
     elif type == 'resnet152v2':
         ret = ResNet152V2(include_top=include_top, **kwargs)
     elif type == 'densenet121':
         ret = DenseNet121(include_top=include_top, **kwargs)
     elif type == 'densenet169':
         ret = DenseNet169(include_top=include_top, **kwargs)
     elif type == 'densenet201':
         ret = DenseNet201(include_top=include_top, **kwargs)
     elif type == 'inceptionresnetv2':
         ret = InceptionResNetV2(include_top=include_top, **kwargs)
     elif type == 'efficientnetb0':
         ret = EfficientNetB0(include_top=include_top, **kwargs)
     elif type == 'efficientnetb1':
         ret = EfficientNetB1(include_top=include_top, **kwargs)
     elif type == 'efficientnetb2':
         ret = EfficientNetB2(include_top=include_top, **kwargs)
     elif type == 'efficientnetb3':
         ret = EfficientNetB3(include_top=include_top, **kwargs)
     elif type == 'efficientnetb4':
         ret = EfficientNetB4(include_top=include_top, **kwargs)
     elif type == 'efficientnetb5':
         ret = EfficientNetB5(include_top=include_top, **kwargs)
     elif type == 'efficientnetb6':
         ret = EfficientNetB6(include_top=include_top, **kwargs)
     elif type == 'efficientnetb7':
         ret = EfficientNetB7(include_top=include_top, **kwargs)
     elif type == 'mobilenet':
         ret = MobileNet(include_top=include_top, **kwargs)
     elif type == 'mobilenetv2':
         ret = MobileNetV2(include_top=include_top, **kwargs)
     #  MobileNetV3() missing 2 required positional arguments: 'stack_fn' and 'last_point_ch'
     #elif type == 'mobilenetv3':
     #    mobile_net = MobileNetV3(include_top=include_top, **kwargs)
     elif type == 'inceptionv3':
         ret = InceptionV3(include_top=include_top, **kwargs)
     elif type == 'nasnet':
         ret = NASNetLarge(include_top=include_top, **kwargs)
     elif type == 'nasnet_mobile':
         ret = NASNetMobile(include_top=include_top, **kwargs)
     elif type == 'xception':
         ret = Xception(include_top=include_top, **kwargs)
     model_path = os.path.join(keras_path, weights_file)
     ret.save(model_path)
     return model_path
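An illustrative call (an assumption inferred from the parsing above): the path format is '<architecture>/<weights file name>', and a file name containing 'no_top' selects include_top=False. The downloader object and file name below are hypothetical.

downloader = ModelDownloader()   # hypothetical class that defines download_for_url()
saved_path = downloader.download_for_url('resnet101v2/resnet101v2_no_top.h5')
print(saved_path)                # path under keras_path where the model was saved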
Example #10
def get_resnetv2(classes=54,
                 depth=50,
                 input_shape=(224, 224, 3),
                 base_layer_trainable=False):
    from tensorflow.keras.applications.resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2
    assert depth in [50, 101, 152]
    if depth == 50:
        base_model = ResNet50V2(include_top=False, input_shape=input_shape)
        for layer in base_model.layers:
            layer.trainable = base_layer_trainable
        head_model = KL.GlobalMaxPool2D()(base_model.output)
        head_model = KL.Dense(1024,
                              activation='relu',
                              name='00',
                              kernel_initializer='he_uniform')(head_model)
        head_model = KL.Dropout(0.5)(head_model)
        # head_model = KL.Dense(1024, activation='relu', name='1111', kernel_initializer='he_uniform')(head_model)
        # head_model = KL.Dropout(0.5)(head_model)
        if classes == 2:
            head_model = KL.Dense(classes, activation='sigmoid',
                                  name='3333')(head_model)
        else:
            head_model = KL.Dense(classes, activation='softmax',
                                  name='3333')(head_model)
        model = KM.Model(inputs=base_model.input, outputs=head_model)
        return model

    elif depth == 101:
        base_model = ResNet101V2(include_top=False, input_shape=input_shape)
        for layer in base_model.layers:
            layer.trainable = base_layer_trainable
        head_model = KL.GlobalMaxPool2D()(base_model.output)
        head_model = KL.Dense(1024,
                              activation='relu',
                              name='00',
                              kernel_initializer='he_uniform')(head_model)
        head_model = KL.Dropout(0.5)(head_model)
        # head_model = KL.Dense(1024, activation='relu', name='1111', kernel_initializer='he_uniform')(head_model)
        # head_model = KL.Dropout(0.5)(head_model)
        if classes == 2:
            head_model = KL.Dense(classes, activation='sigmoid',
                                  name='3333')(head_model)
        else:
            head_model = KL.Dense(classes, activation='softmax',
                                  name='3333')(head_model)
        model = KM.Model(inputs=base_model.input, outputs=head_model)
        return model

    else:
        base_model = ResNet152V2(include_top=False, input_shape=input_shape)
        for layer in base_model.layers:
            layer.trainable = base_layer_trainable
        head_model = KL.GlobalMaxPool2D()(base_model.output)
        head_model = KL.Dense(1024,
                              activation='relu',
                              name='00',
                              kernel_initializer='he_uniform')(head_model)
        head_model = KL.Dropout(0.5)(head_model)
        head_model = KL.Dense(1024,
                              activation='relu',
                              name='11',
                              kernel_initializer='he_uniform')(head_model)
        head_model = KL.Dropout(0.5)(head_model)
        if classes == 2:
            head_model = KL.Dense(classes, activation='sigmoid',
                                  name='3333')(head_model)
        else:
            head_model = KL.Dense(classes, activation='softmax',
                                  name='3333')(head_model)
        model = KM.Model(inputs=base_model.input, outputs=head_model)
        return model
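A brief usage sketch (not from the original source); the class count, optimizer, and loss below are illustrative assumptions.

model = get_resnetv2(classes=54, depth=101, input_shape=(224, 224, 3),
                     base_layer_trainable=False)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',   # 'binary_crossentropy' would fit the sigmoid head when classes == 2
              metrics=['accuracy'])
model.summary()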
Example #11
def resnet101v2(model_config,
                input_shape,
                metrics,
                n_classes,
                mixed_precision=False,
                output_bias=None):
    '''
    Defines a model based on a pretrained ResNet101V2 for multiclass US classification.
    :param model_config: A dictionary of parameters associated with the model architecture
    :param input_shape: The shape of the model input
    :param metrics: Metrics to track model's performance
    :param n_classes: # of classes in data
    :param mixed_precision: Whether to use mixed precision (use if you have GPU with compute capacity >= 7.0)
    :param output_bias: bias initializer of output layer
    :return: a Keras Model object with the architecture defined in this method
    '''

    # Set hyperparameters
    nodes_dense0 = model_config['NODES_DENSE0']
    nodes_dense1 = model_config['NODES_DENSE1']
    lr = model_config['LR']
    dropout = model_config['DROPOUT']
    l2_lambda = model_config['L2_LAMBDA']
    optimizer = Adam(learning_rate=lr)
    print("MODEL CONFIG: ", model_config)
    if mixed_precision:
        tf.train.experimental.enable_mixed_precision_graph_rewrite(optimizer)

    if output_bias is not None:
        output_bias = Constant(output_bias)  # Set initial output bias

    # Start with pretrained ResNet101V2
    X_input = Input(input_shape, name='input')
    base_model = ResNet101V2(include_top=False,
                             weights='imagenet',
                             input_shape=input_shape,
                             input_tensor=X_input)
    X = base_model.output

    # Add custom top layers
    X = GlobalAveragePooling2D()(X)
    X = Dropout(dropout)(X)
    X = Dense(nodes_dense0,
              kernel_initializer='he_uniform',
              activation='relu',
              activity_regularizer=l2(l2_lambda))(X)
    X = Dropout(dropout)(X)
    X = Dense(nodes_dense1,
              kernel_initializer='he_uniform',
              activation='relu',
              activity_regularizer=l2(l2_lambda))(X)
    X = Dense(n_classes, bias_initializer=output_bias)(X)
    Y = Activation('softmax', dtype='float32', name='output')(X)

    # Set model loss function, optimizer, metrics.
    model = Model(inputs=X_input, outputs=Y)
    model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=metrics)
    return model
Example #12
    featureFolderName = 'v2_ResNet_npyData_Crop'
    model = ResNet50V2(weights='imagenet', include_top=False)
    model.summary()

elif model == validModels[3]:  #'InceptionResNetV2':
    from tensorflow.keras.applications.inception_resnet_v2 import InceptionResNetV2
    from keras.applications.inception_resnet_v2 import preprocess_input
    featureFolderName = 'v2_inc_rn'
    model = InceptionResNetV2(weights='imagenet', include_top=False)
    model.summary()

elif model == validModels[4]:  #'ResNet101V2':
    from tensorflow.keras.applications.resnet_v2 import ResNet101V2
    from keras.applications.resnet_v2 import preprocess_input
    featureFolderName = 'v2_101_ResNet_npyData'
    model = ResNet101V2(weights='imagenet', include_top=False)
    model.summary()

elif model == validModels[5]:  #'xception':
    from tensorflow.keras.applications.xception import Xception
    from keras.applications.xception import preprocess_input
    featureFolderName = 'xception_npyData'
    model = Xception(weights='imagenet', include_top=False)
    model.summary()

elif model == validModels[6]:  #'densenet':
    from tensorflow.keras.applications.densenet import DenseNet121
    from keras.applications.densenet import preprocess_input
    featureFolderName = 'densenet_npyData'
    model = DenseNet121(weights='imagenet', include_top=False)
    model.summary()
Example #13
class Tagger:
    """
    This class performs image classification with the pretrained ResNet101V2 CNN.
    """

    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

    input_image_size = (400, 400, 3)
    input_tensor = Input(shape=input_image_size)

    model = ResNet101V2(weights='imagenet',
                        include_top=True,
                        input_tensor=input_tensor)

    @staticmethod
    def set_meta_tag(file_path, comment):
        """
        Function sets label to an UserComment tag [EXIF metadata] of an image.

        :param file_path: Path to the file,
        :param comment: Value of a label
        """
        exif_dict = piexif.load(file_path)
        user_comment = piexif.helper.UserComment.dump(str(comment))
        exif_dict["Exif"][piexif.ExifIFD.UserComment] = user_comment
        exif_bytes = piexif.dump(exif_dict)
        piexif.insert(exif_bytes, file_path)

    @staticmethod
    def tag_file(file_name):
        """
        This method classifies one given image.
        :param file_name: Path to the file, that should be classified.
        :return: Tags as list of strings
        """

        f = open('labels.txt', 'w+')

        res = []
        if file_name.endswith('.jpeg') or file_name.endswith('.jpg'):

            # preprocess an image
            img = tf_image.load_img(file_name,
                                    target_size=Tagger.input_image_size[:2])
            img = tf_image.img_to_array(img)
            img = np.expand_dims(img, axis=0)
            img = preprocess_input(img)
            # apply NN and make a prediction
            predictions = Tagger.model.predict(img)
            # decode the results into a list of tuples (class, description, probability)
            # (one such list for each sample in the batch)

            translated_predictions = decode_predictions(predictions, top=2)[0]

            if float(translated_predictions[0][2]) - float(
                    translated_predictions[1][2]) <= 0.08:
                res = [
                    translated_predictions[0][1], translated_predictions[1][1]
                ]
                tf.keras.backend.print_tensor(Tagger.model.layers[-1].output)
            else:
                res = [translated_predictions[0][1]]

        f.close()
        return res

    @staticmethod
    def tag_dir(dir_path='./images', set_meta=False, file_log=False, nimgs=25):
        """
        Use ML to classify your images, assigning one or more labels per image.
        Only .jpg and .jpeg files are taken into account.
        :param nimgs: Number of images per thread
        :param file_log: Create output file LABELS.TXT with labels
        :param set_meta: Set meta-tag with labeled class
        :param dir_path: Path to the directory where images for classification are located. Default: './images'
        :return: Tuple: 1) list of strings with labels, tuples for multilabel; 2) names of tagged files
        """
        def thread_function(files_range, result):
            """Function for operations being executed inside a thread.

            :param files_range: The subset of files this thread should process.
            :param result: List that collects this thread's labels.
            :return:
            """
            if file_log:
                f = open('labels.txt', 'w+')
            i = 0

            for file_name in files_range:
                if file_name.endswith('.jpeg') or file_name.endswith('.jpg'):
                    img_path = dir_path + file_name
                    # preprocess an image
                    img = tf_image.load_img(
                        img_path, target_size=Tagger.input_image_size[:2])
                    img = tf_image.img_to_array(img)
                    img = np.expand_dims(img, axis=0)
                    img = preprocess_input(img)

                    # apply NN and make a prediction
                    predictions = Tagger.model.predict(img)
                    # decode the results into a list of tuples (class, description, probability)
                    # (one such list for each sample in the batch)
                    i += 1
                    translated_predictions = decode_predictions(predictions,
                                                                top=2)[0]
                    if float(translated_predictions[0][2]) <= 0.05:
                        result.append("")
                        if set_meta:
                            Tagger.set_meta_tag(img_path, 'none')
                        if file_log:
                            f.write(
                                str(i) + '.  ' + str(file_name) + ' ' * 5 +
                                '->  none\n')
                    else:
                        if float(translated_predictions[0][2]) - float(
                                translated_predictions[1][2]) <= 0.08:
                            result.append((str(translated_predictions[0][1]),
                                           str(translated_predictions[1][1])))
                            if set_meta:
                                Tagger.set_meta_tag(
                                    img_path, translated_predictions[0][1] +
                                    ', ' + translated_predictions[1][1])
                            if file_log:
                                f.write(
                                    str(i) + '.  ' + str(file_name) + ' ' * 5 +
                                    '->  ' +
                                    str(translated_predictions[0][1]) + ', ' +
                                    str(translated_predictions[1][1]) + '\n')
                        else:
                            result.append(str(translated_predictions[0][1]))
                            if set_meta:
                                Tagger.set_meta_tag(
                                    img_path, translated_predictions[0][1])
                            if file_log:
                                f.write(
                                    str(i) + '.  ' + str(file_name) + ' ' * 5 +
                                    '->  ' +
                                    str(translated_predictions[0][1]) + '\n')
            if file_log:
                f.close()

        #END thread_function

        if dir_path[-1] != '/':
            dir_path = dir_path + '/'

        # All files of the proper format that are not already included in the database
        files = [
            name for name in os.listdir(dir_path)
            if os.path.isfile(os.path.join(dir_path, name)) and (
                name.endswith('.jpeg') or name.endswith('.jpg'))
            and not DB.DataBase.exists(pth=os.path.join(dir_path, name))
        ]
        nfiles = len(files)

        # Number of threads to run (floor division); the last thread handles any remainder
        nthreads = nfiles // nimgs
        # Handle the case where nimgs > nfiles
        if nthreads == 0:
            nthreads = 1
        # Per-thread result lists
        res = [[] for i in range(nthreads)]
        # File ranges assigned to each thread
        fargs = list()
        for i in range(nthreads):
            start = i * nimgs
            # The last thread handles any additional files
            if i == nthreads - 1:
                fargs.append(files[start:])
            else:
                end = (i + 1) * nimgs  # slice end is exclusive, so no -1 here
                fargs.append(files[start:end])

        # Execute the tasks
        thrds = [
            Thread(target=thread_function, args=[fargs[i], res[i]])
            for i in range(nthreads)
        ]
        for t in thrds:
            t.start()

        for t in thrds:
            t.join()


        # Merge the threads' results into one list
        result = list()
        for r in res:
            for tags in r:
                result.append(tags)

        return result, files
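A short usage sketch (the file and directory paths are assumptions); it relies on the module-level imports the class above already uses (tensorflow, numpy, piexif, DB).

# Tag a single image and print its labels.
print(Tagger.tag_file('sample.jpg'))

# Tag every .jpg/.jpeg in a directory, writing a labels.txt log but leaving EXIF metadata untouched.
labels, tagged_files = Tagger.tag_dir('./images', set_meta=False, file_log=True, nimgs=25)
print(labels, tagged_files)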