def print_stats(y_data):
    # Count the occurrences of each class label and log its share of the set.
    unique, counts = np.unique(y_data, return_counts=True)
    for i in range(unique.shape[0]):
        percent = int(counts[i] / len(y_data) * 100)
        log.print_info(CATEGORIES[unique[i]] + " (" + str(unique[i]) +
                       ") -> " + str(counts[i]) + " (" + str(percent) + "%)")
    log.print_info("Total: " + str(np.sum(counts)))
Example #2
    def search(self, keywords, category='210'):
        """
			@param string keywords   
			@param string category

			@return array of Torrent 
		"""
        if category in self.CATEGORY:
            catid = self.CATEGORY[category]
        else:
            catid = '210'

        response = []
        logger.print_info("Recherche en cours ...", eol='')
        json_response = self.__call_json(
            "torrents/search/%s&cid=%s" % (keywords, catid), {
                'offset': '0',
                'limit': '200'
            })
        logger.print_ok()

        for torrent in json_response['torrents']:
            response.append(Torrent(torrent))

        return response
Example #3
def make_folder(folder_name):
    try:
        # Create target Directory
        os.makedirs(folder_name)
        log.print_info(" Directory " + str(folder_name) + " created ")
    except FileExistsError:
        log.print_info(" Directory " + str(folder_name) + " already exists")
Example #4
    def add_user(self, user):
        cursor = self._get_cursor()

        passwd = self.__hash_password(user, user)

        query = "SELECT iduser FROM users WHERE username='******'".format(user)

        cursor.execute(query)
        for _ in cursor:
            return None

        timestamp = self.__get_timestamp()

        query = "INSERT INTO users (username, userpass, created) VALUES ('{0}','{1}','{2}')".format(
            user, passwd, timestamp)

        cursor.execute(query)  # reuse the open cursor instead of leaking a new one
        self.connection.commit()

        print_info(__name__, "User {0} created".format(user))

        cursor = self._get_cursor()
        query = ("SELECT iduser, username, userpass, created "
                 "FROM users "
                 "WHERE username = '******'").format(user)

        cursor.execute(query)
        # DB-API cursors are not indexable; fetch the first (and only) row.
        iduser, username, userpass, created = cursor.fetchone()
        cursor.close()

        return self.__get_token(userpass, user, iduser, created)
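Both SELECTs above splice user straight into the SQL string, which is injection-prone even with the placeholder restored. A minimal runnable sketch of the parameterized alternative, with sqlite3 standing in for whatever driver the class actually uses (sqlite3's paramstyle is ?; MySQL Connector and psycopg2 use %s instead):

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE users (iduser INTEGER PRIMARY KEY, username TEXT)")
cur.execute("INSERT INTO users (username) VALUES (?)", ("alice",))
# The driver escapes the value itself; no string formatting, no injection.
cur.execute("SELECT iduser FROM users WHERE username = ?", ("alice",))
print(cur.fetchone())  # (1,)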
Example #5
	def loadToken(self):
		"""
			Process Authentification
		"""
		logger.print_info("Authentification ... ",eol='')
		json_decode = self.__call_json('auth',{'username':self.USERNAME,'password':self.PASSWORD})
		self.token = json_decode['token']
		logger.print_ok()
Example #6
def load_model(model_name, drop_rate):
    model_path = os.path.join(MODEL_FOLDER, model_name)
    input_tensor = tf.keras.Input(shape=(224, 224, 3))
    bayesian_model = models.Model(
        input_tensor, bayesian_cnn(inputs=input_tensor, drop_rate=drop_rate))
    bayesian_model.load_weights(model_path, by_name=False)
    log.print_info("Loaded model " + model_name)
    return bayesian_model
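A hypothetical call; 'example_weights.h5' is a placeholder for a file saved under MODEL_FOLDER by compile_model, and because by_name=False loads weights in layer order, bayesian_cnn must rebuild exactly the architecture that was trained:

model = load_model("example_weights.h5", drop_rate=0.3)  # placeholder filename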
Example #7
    def is_connected(self):
        try:
            logger.print_cmd('Send MQTT ping')
            self.__c.ping()
            self.__connected = True
            logger.print_info('Connected to MQTT Broker')
        except Exception:  # a failed ping means the broker is unreachable
            self.disconnect(False)
        return self.__connected
Example #8
	def download(self,tid):
		"""
			@param int tid : id of the expected torrent
			@return torrent data
		"""
		logger.print_info("Download torrent ... ",eol='')
		torrent = self.__call("torrents/download/%s" % tid)
		logger.print_ok()
		return torrent
Example #10
	def processDownloadLocal(self, torrent_datas):
		torrent_datas = torrent_replace_announce(torrent_datas, self.config.get('tracker', 'url'))

		tmp_filename = "%s/%s.torrent" % (tempfile.gettempdir(), self.options.download)
		write_into_file(tmp_filename, torrent_datas)

		cmd = self.config.get('global', 'torrent-client') % tmp_filename
		logger.print_info("Launching torrent [ %s ]" % cmd)
		os.system(cmd)
Example #12
    def delete_user(self, user):
        cursor = self._get_cursor()

        query = "DELETE FROM users WHERE username='******'".format(user)
        cursor.execute(query)
        self.connection.commit()

        print_info(__name__, "User {0} deleted".format(user))

        cursor.close()
Example #13
    def is_connected(self):
        logger.print_cmd('Test WIFI Connection')
        ret = self.__nic.isconnected()

        if not ret:
            logger.print_info('Wifi Disconnected!')
        else:
            logger.print_info('Wifi Connected!')

        return ret
Example #15
def dataset_split(x, y, p, test_factor=0.5, random_state=None):
    log.print_debug("Splitting dataset")
    dataset = []
    for i in range(0, len(y)):
        dataset.append([x[i], y[i], p[i]])
    # One random key per patient id: sorting by it shuffles patients as blocks,
    # keeping every sample of a patient contiguous.
    random.seed(random_state)
    r = {p_e: random.random() for x_e, y_e, p_e in dataset}
    dataset.sort(key=lambda item: r[item[2]])
    train_size = int(len(dataset) - int(len(dataset) * test_factor))
    # The raw split index may land inside one patient's run of samples; scan
    # backwards and forwards for the nearest patient boundary.
    before_different = train_size - 2
    after_different = train_size
    while dataset[before_different][2] == dataset[train_size - 1][2]:
        before_different = before_different - 1
    while dataset[after_different][2] == dataset[train_size - 1][2]:
        after_different = after_different + 1
    # Cut at whichever boundary stays closer to the requested train size.
    if train_size - before_different < after_different - train_size:
        X_train = np.asarray(dataset)[:before_different + 1, 0]
        y_train = np.asarray(dataset)[:before_different + 1, 1]
        X_test = np.asarray(dataset)[before_different + 1:, 0]
        y_test = np.asarray(dataset)[before_different + 1:, 1]
        in_train_patients = np.unique(
            np.asarray(dataset)[:before_different + 1, 2])
        in_test_patients = np.unique(
            np.asarray(dataset)[before_different + 1:, 2])
    else:
        X_train = np.asarray(dataset)[:after_different + 1, 0]
        y_train = np.asarray(dataset)[:after_different + 1, 1]
        X_test = np.asarray(dataset)[after_different + 1:, 0]
        y_test = np.asarray(dataset)[after_different + 1:, 1]
        in_train_patients = np.unique(
            np.asarray(dataset)[:after_different + 1, 2])
        in_test_patients = np.unique(
            np.asarray(dataset)[after_different + 1:, 2])

    log.print_info(" Dataset shape : " + str(X_train.shape) + " " +
                   str(y_train.shape) + str(X_test.shape) + " " +
                   str(y_test.shape))

    X_train, y_train = balance_set(X_train, y_train, in_train_patients)
    X_test, y_test = balance_set(X_test, y_test, in_test_patients)
    return X_train, X_test, y_train, y_test
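The boundary search above keeps every patient entirely on one side of the split, preventing leakage between train and test. A minimal usage sketch with hypothetical toy arrays, assuming the surrounding module (balance_set, log) is importable:

import numpy as np

X = np.zeros((6, 8))                                # stand-in feature rows
y = np.array([0, 0, 1, 1, 0, 1])                    # class labels
p = np.array(["p1", "p1", "p2", "p2", "p3", "p3"])  # patient ids
X_train, X_test, y_train, y_test = dataset_split(X, y, p, test_factor=0.5,
                                                 random_state=42)
# No patient id appears on both sides of the resulting split.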
Example #16
def run_webserver():
    logger.print_cmd('Starting Webserver')
    import socket
    import ujson as json

    s = socket.socket()
    s.settimeout(TIMEOUT)
    s.bind(SERVER_ADDRESS)
    s.listen(REQUEST_QUEUE_SIZE)

    logger.print_info('Serving HTTP on port {port} ...'.format(port=MY_PORT))

    while True:
        conn, addr = s.accept()
        request = conn.recv(1024)
        request = str(request)
        logger.print_info('%s - %s' % (addr[0], request))
        json_str = json.dumps(web_page())
        response = json_str + '\r\n'
        conn.send(response.encode())  # send bytes for portability across ports
        conn.close()
Example #18
	def processDownloadTransmission(self, torrent_datas):
		torrent_datas = torrent_replace_announce(torrent_datas, self.config.get('transmission', 'tracker'))

		logger.print_info('Connecting to transmission server ... ', eol='')
		tc = transmissionrpc.Client(self.config.get('transmission', 'host'),
			port=self.config.get('transmission', 'port'),
			user=self.config.get('transmission', 'username'),
			password=self.config.get('transmission', 'password'))
		logger.print_ok()

		logger.print_info("Uploading torrent ... ", eol='')
		torrent = tc.add_torrent(base64.b64encode(torrent_datas))
		logger.print_ok()

		# Poll the daemon until the torrent completes, drawing a progress bar.
		torrent = tc.get_torrent(torrent.id)
		while torrent.progress < 100:
			sys.stdout.write('\r %.2f%% [%-100s] ' % (torrent.progress, "=" * int(torrent.progress) + ">"))
			sys.stdout.flush()
			torrent = tc.get_torrent(torrent.id)
			time.sleep(1)
		print('\r 100%% [%s]   ' % ('=' * 100))
		logger.print_success('Download complete')
		tc.stop_torrent(torrent)
Example #20
def mc_predict_from_path(iterations, file_path, pred_folder, drop_rate,
                         crop_size):
    mc_predictions = []
    slidename = str(path.basename(file_path))
    flattened_image_name = slidename.replace(".svs",
                                             "_" + str(crop_size) + ".bin", 1)
    path_ = path.join(MAP_FOLDER, flattened_image_name)
    if os.path.isfile(path_):
        batch_to_predict_np = read_blob(flattened_image_name, MAP_FOLDER)
    else:
        slide_ = slide.open_slide(file_path)
        slide_size = slide.get_slide_size(slide_)
        save_blob(slide_size, slidename.replace(".svs", ".info", 1),
                  MAP_FOLDER)
        log.print_info("Slide size : " + str(slide_size))
        pool = []
        x_max = ceil(slide_size[0] / crop_size)
        y_max = ceil(slide_size[1] / crop_size)
        log.print_info("Matrix size : " + str(y_max) + " " + str(x_max))
        batch_to_predict = [None] * x_max * y_max
        valid_bit_list = [None] * x_max * y_max
        for y in range(0, y_max - 1):
            pool.append(
                Thread(target=custom_crop,
                       args=(slide_, y, batch_to_predict, valid_bit_list,
                             x_max, crop_size)))
            pool[-1].start()
        t = Thread(target=custom_crop_last,
                   args=(slide_, y_max - 1, batch_to_predict, valid_bit_list,
                         x_max, crop_size))
        t.start()
        for p in pool:
            p.join()
        t.join()
        valid_bit_np = np.asarray(valid_bit_list)
        save_blob(
            valid_bit_np,
            slidename.replace(".svs", "") + "_" + str(crop_size) + ".vbit",
            MAP_FOLDER)
        batch_to_predict_np = np.asarray(batch_to_predict)
        save_blob(batch_to_predict_np, flattened_image_name, MAP_FOLDER)
        del valid_bit_list
        del valid_bit_np
        del batch_to_predict

    for i in range(0, iterations):
        log.print_info("Step " + str(i))
        prediction_list = cnn.predict_from_model_multithread(
            batch_to_predict_np, drop_rate)
        mc_predictions.append(np.asarray(prediction_list))
        save_blob(
            mc_predictions,
            slidename.replace(".svs", "") + "_" + str(crop_size) + "_" +
            str(iterations) + ".pred", pred_folder)
        del prediction_list
        gc.collect()
    del batch_to_predict_np
Example #21
def get_prediction_matrix_multithread(slidename):
    slide_ = slide.open_slide(os.path.join(MAP_FOLDER, slidename))
    slide_size = slide.get_slide_size(slide_)
    log.print_info("Slide size : " + str(slide_size))

    pool = []

    x_max = ceil(slide_size[0] / CROP_SIZE)
    y_max = ceil(slide_size[1] / CROP_SIZE)

    log.print_info("Matrix size : " + str(y_max) + " " + str(x_max))
    batch_to_predict = [None] * y_max
    valid_bit_matrix = [None] * y_max
    for y in range(0, y_max - 1):
        pool.append(
            Thread(target=custom_crop,
                   args=(slide_, y, batch_to_predict, valid_bit_matrix,
                         x_max)))
        pool[-1].start()
    t = Thread(target=custom_crop_last,
               args=(slide_, y_max - 1, batch_to_predict, valid_bit_matrix,
                     x_max))
    t.start()
    for p in pool:
        p.join()
    t.join()

    batch_to_predict_np = np.asarray(batch_to_predict)
    print(batch_to_predict_np.shape)
    # One slot per row; predict_from_model_multithread fills prediction_list[i].
    prediction_list = [[]] * batch_to_predict_np.shape[0]
    for i in range(0, batch_to_predict_np.shape[0]):
        # Chained [:] on an ndarray is a no-op; index the row directly.
        cnn.predict_from_model_multithread(batch_to_predict_np[i],
                                           prediction_list, i)
    prediction_matrix = np.asarray(prediction_list)
    print(prediction_matrix.shape)
    return utils.list_np_to_pil(
        batch_to_predict_np, utils.COLOR), prediction_matrix, valid_bit_matrix
Example #22
def compile_model(x_train, y_train, x_test, y_test, drop_rate):

    if tf.test.is_built_with_cuda():  # it is a function, not an attribute
        if tf.test.is_gpu_available(cuda_only=False,
                                    min_cuda_compute_capability=None):
            log.print_error("MAYBE GPU IS USED")
        else:
            log.print_warning("NO GPU IS USED")
    else:
        log.print_warning("THIS VERSION OF TENSORFLOW DOES NOT USES CUDA")
    input_tensor = tf.keras.Input(shape=x_train[0].shape)
    bayesian_model = models.Model(
        input_tensor, bayesian_cnn(inputs=input_tensor, drop_rate=drop_rate))
    opt = tf.keras.optimizers.Adam(lr=LEARNING_RATE, decay=DECAY)
    bayesian_model.compile(loss='sparse_categorical_crossentropy',
                           optimizer=opt,
                           metrics=['accuracy'])
    model_name = str(x_train[0].shape[0]) + "_" + str(N_EPOCH) + "_" + str(BATCH_SIZE) + "_" + str(LEARNING_RATE) \
                 + "_" + str(DECAY) + "_" + str(drop_rate) + "_" + str(USE_BIAS) + "_" + str(DENSE_SIZE) + "_" \
                 + str(SEPARABLE_CONVOLUTION) + "_local.h5"
    bayesian_model.summary()
    # Save model skeleton
    if not os.path.isdir(SUMMARY_FOLDER):
        os.makedirs(SUMMARY_FOLDER)
    summary_path = os.path.join(SUMMARY_FOLDER, model_name + ".txt")
    with open(summary_path, 'w') as f:
        with redirect_stdout(f):
            bayesian_model.summary()

    bayesian_train = bayesian_model.fit(x_train,
                                        y_train,
                                        batch_size=BATCH_SIZE,
                                        epochs=N_EPOCH,
                                        validation_data=(x_test, y_test),
                                        shuffle=True)
    # Save model and weights
    if not os.path.isdir(MODEL_FOLDER):
        os.makedirs(MODEL_FOLDER)
    model_path = os.path.join(MODEL_FOLDER, model_name)
    bayesian_model.save_weights(model_path)
    log.print_info('Saved trained model at %s ' % model_path)

    # Score trained model.
    scores = bayesian_model.evaluate(x_test, y_test, verbose=1)
    log.print_info('Test loss : ' + str(scores[0]))
    log.print_info('Test accuracy : ' + str(scores[1]))
Example #23
def open_dataset():
    x_path = path.join(SET_FOLDER, "X.pickle")
    y_path = path.join(SET_FOLDER, "y.pickle")
    p_path = path.join(SET_FOLDER, "p.pickle")

    if not os.path.isdir(SET_FOLDER):
        os.makedirs(SET_FOLDER)

    if os.path.isfile(x_path) and os.path.isfile(y_path) and os.path.isfile(
            p_path):
        log.print_debug("Opening saved sets in " + str(SET_FOLDER))
        with open(x_path, "rb") as pickle_in:
            X = pickle.load(pickle_in)
        with open(y_path, "rb") as pickle_in:
            y = pickle.load(pickle_in)
        with open(p_path, "rb") as pickle_in:
            p = pickle.load(pickle_in)
    else:
        X, y, p = load_datasets(1344, 2240, 3136)
        log.print_debug("Saving and opening sets in " + str(SET_FOLDER))
        pickle_out = open(x_path, "wb")
        pickle.dump(X, pickle_out)
        pickle_out.close()
        pickle_out = open(y_path, "wb")
        pickle.dump(y, pickle_out)
        pickle_out.close()
        pickle_out = open(p_path, "wb")
        pickle.dump(p, pickle_out)
        pickle_out.close()

    log.print_info(" Dataset shape : " + str(len(X)) + " " + str(len(y)) +
                   " " + str(len(p)))

    if not os.path.isdir(path.join(SET_FOLDER, str(RANDOM_STATE))):
        os.makedirs(path.join(SET_FOLDER, str(RANDOM_STATE)))
    x_train_path = path.join(SET_FOLDER, str(RANDOM_STATE), "X_train.pickle")
    y_train_path = path.join(SET_FOLDER, str(RANDOM_STATE), "y_train.pickle")
    x_test_path = path.join(SET_FOLDER, str(RANDOM_STATE), "X_test.pickle")
    y_test_path = path.join(SET_FOLDER, str(RANDOM_STATE), "y_test.pickle")
    if os.path.isfile(x_train_path) and os.path.isfile(
            y_train_path) and os.path.isfile(x_test_path) and os.path.isfile(
                y_test_path):
        with open(x_train_path, "rb") as pickle_in:
            X_train = pickle.load(pickle_in)
        with open(y_train_path, "rb") as pickle_in:
            y_train = pickle.load(pickle_in)
        with open(x_test_path, "rb") as pickle_in:
            X_test = pickle.load(pickle_in)
        with open(y_test_path, "rb") as pickle_in:
            y_test = pickle.load(pickle_in)
    else:
        X_train, X_test, y_train, y_test = dataset_split(
            X, y, p, test_factor=TEST_SIZE, random_state=RANDOM_STATE)
        pickle_out = open(x_train_path, "wb")
        pickle.dump(X_train, pickle_out)
        pickle_out.close()
        pickle_out = open(y_train_path, "wb")
        pickle.dump(y_train, pickle_out)
        pickle_out.close()
        pickle_out = open(x_test_path, "wb")
        pickle.dump(X_test, pickle_out)
        pickle_out.close()
        pickle_out = open(y_test_path, "wb")
        pickle.dump(y_test, pickle_out)
        pickle_out.close()

    return X_train, y_train, X_test, y_test
Example #24
    def processLogging(self, url):
        event = re.search(r'event=(\w+)&', url)
        if event:
            logger.print_info("event %s" % event.group(1))
        else:
            logger.print_info(url)
Example #25
def train(drop_rate):
    X_train, y_train, X_test, y_test = dm.open_dataset()
    log.print_info(" TRAIN STATs")
    log.print_info(" Train set shape : " + str(X_train.shape) + " " +
                   str(y_train.shape))
    log.print_info(" Train set type : " + str(X_train.dtype))
    dm.print_stats(y_train)
    log.print_info(" TEST STATs")
    log.print_info(" Test set shape : " + str(X_test.shape) + " " +
                   str(y_test.shape))
    log.print_info(" Test set type : " + str(X_test.dtype))
    dm.print_stats(y_test)
    convNet.compile_model(X_train, y_train, X_test, y_test, drop_rate)