Example #1
def test_ckps(data_dir,
              auged,
              ckp_dir,
              log_dir,
              model,
              model_kwargs,
              mets,
              device=torch.device("cpu"),
              loss_func=torch.nn.BCELoss(),
              total_amt=16384,
              val_percent=0.25,
              test_amt=768,
              wrapped_function=None,
              workers=0,
              seed=42):
    """
    :param data_dir: the directory to the data
    :param auged: whether or not the data is augmented or not
    :param ckp_dir: the directory to the checkpoint
    :param log_dir: the directory to the logs
    :param model: the class of the model
    :param mets: the metrics to use
    :param device: the device to use
    :param loss_func: the loss function to use
    :param total_amt: the total amount of data to use
    :param val_percent: the percent of data to use for validation
    :param test_amt: the amount of data to use for testing
    :param wrapped_function: the wrapped function ot use
    :param workers: the number of workers to use
    :param seed: the seed to use
    """
    data = Data(data_dir,
                auged,
                total_amt=total_amt,
                val_percent=val_percent,
                test_amt=test_amt,
                wrapped_function=wrapped_function,
                workers=workers,
                device=device,
                verbose=True,
                seed=seed)
    test_data = data.get_test_data()
    for dir_ in os.listdir(ckp_dir):
        mod_ckp_dir = f"{ckp_dir}/{dir_}"
        if os.path.isdir(mod_ckp_dir):
            for ckp in os.listdir(mod_ckp_dir):
                if "FINAL" in ckp:
                    fin_dir = f"{mod_ckp_dir}/{ckp}"
                    name = ckp.replace(".pt", "")
                    logger = Logger(f"{name}_TEST",
                                    log_dir,
                                    mets,
                                    overwrite=True,
                                    verbose=True)
                    mod = model(**model_kwargs)
                    mod.to(device)
                    print(f"Loading model from {fin_dir}")
                    mod, _ = load_ckp(fin_dir, mod, dev=device)
                    name = name.replace("_FINAL", "")  # ".pt" was already stripped above
                    test_model(test_data, mod, loss_func, logger, name)
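A minimal invocation sketch for test_ckps, assuming a toy model class and placeholder paths; the directories, model arguments and metric names are illustrative, not taken from the original project.

import torch
import torch.nn as nn

class TinyClassifier(nn.Module):
    """Placeholder model purely for illustration."""
    def __init__(self, in_features=64):
        super().__init__()
        self.net = nn.Sequential(nn.Linear(in_features, 1), nn.Sigmoid())

    def forward(self, x):
        return self.net(x)

test_ckps(data_dir="data/processed",            # hypothetical directories
          auged=False,
          ckp_dir="checkpoints",
          log_dir="logs",
          model=TinyClassifier,
          model_kwargs={"in_features": 64},
          mets=["accuracy", "f1"],              # assumed metric identifiers
          device=torch.device("cuda" if torch.cuda.is_available() else "cpu"))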
Example #2
    def activate_logger(self):
        # Create the global logger only if one is not already active.
        if not self.__check_logger_status():
            GS.LOGGER = Logger()
            GS.LOGGER.write_to_logger(
                "########################################################################\n"
                "########################### LOGGER ACTIVATED ###########################\n"
                "########################################################################"
            )
        return GS.LOGGER
Example #3
def read_json_to_dict(json_fullpath):
    """
    Read a json and return a object created from it.
    Args:
        json_fullpath: json fullpath

    Returns: json object.
    """
    try:
        with open(json_fullpath, 'r+') as outfile:
            json_readed = json.load(outfile)
        return json_readed
    except Exception as error:
        Logger().write_log_error(error)
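A tiny usage sketch; the path and key are hypothetical, and it relies on read_json_to_dict returning None when reading fails.

config = read_json_to_dict("config/settings.json")   # hypothetical path
if config is not None:
    print(config.get("log_level", "INFO"))            # hypothetical key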
Example #4
def setup(private_key: int, log_level: LogLevels, file: int) -> None:
    # Initialise the global logger, the crypto helper and the store, in that order.
    Logger(LogLevels(log_level))
    Crypto(private_key)
    Store.setup_bn_store(file)
Example #5
import json
import traceback

import src.config.GlobalSettings as GS
import src.utils.UtilsFunctions as utils
from src.utils.Prints import pt
from src.utils.Logger import Logger

LOGGER = GS.LOGGER if GS.LOGGER else Logger()


def read_json_to_dict(json_fullpath):
    """
    Read a json and return a object created from it.
    Args:
        json_fullpath: json fullpath

    Returns: json object.
    """
    try:
        with open(json_fullpath, 'r+') as outfile:
            json_readed = json.load(outfile)
        return json_readed
    except Exception as error:
        LOGGER.write_log_error(error)


def object_to_json(object, attributes_to_delete=None, **kwargs):
    """
    Convert a class instance to JSON using its properties method.
    Args:
Example #6
    def run_trial(self, LR, BATCH_SIZE, OPTIM, LOSS):
        """
        Initalise all over again for training on given Learning rate, Batch size, Optimizer and loss function
        This function basically just initialises everything and sends it to the fitmodel
        :param LR: Learning Rate
        :param BATCH_SIZE: Batch Size
        :param OPTIM: Optimiser
        :param LOSS: Loss Function
        :return: The final metric to optimise score from the training
        """
        set_seed(self.seed)

        NAME = f"{self.name}_{str(LR).replace('.', '_')}"
        NAME += f"_{BATCH_SIZE}"

        model = self.model_class(**self.model_kwargs)

        opt = OPTIM(model.parameters(), lr=LR)
        opt_str = type(opt).__name__
        NAME += f"_{opt_str}"

        loss_func = LOSS()
        loss_func_str = type(loss_func).__name__
        NAME += f"_{loss_func_str}"

        data = Data(self.DATA_DIR,
                    self.augmented,
                    batch_size=BATCH_SIZE,
                    total_amt=self.total_amt,
                    val_percent=self.val_percent,
                    test_amt=self.test_amt,
                    wrapped_function=self.wrapped_function,
                    workers=self.workers,
                    device=self.device,
                    verbose=self.verbose,
                    seed=self.seed)

        logger = Logger(NAME,
                        self.LOG_DIR,
                        self.metrics_to_use,
                        train_early_stopping=self.tres,
                        test_early_stopping=self.tes,
                        stopping_attention=self.es_attn,
                        overwrite=self.overwrite,
                        verbose=self.verbose)

        checkpointer = Checkpoint(NAME,
                                  self.CKP_DIR,
                                  self.save_every,
                                  overwrite=self.overwrite)

        FitModel(model,
                 data,
                 opt,
                 loss_func,
                 self.epochs,
                 self.device,
                 logger,
                 checkpointer,
                 verbose=self.verbose,
                 seed=self.seed)

        met_final = logger.test_history[self.metric_to_optimise][-1]

        return NAME, met_final
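A sketch of driving run_trial from a small grid search. The search space is illustrative and tuner is assumed to be an instance of the class that defines run_trial; it also presumes the optimised metric is higher-is-better.

import itertools

import torch

learning_rates = [1e-3, 1e-4]                    # hypothetical search space
batch_sizes = [32, 64]
optimisers = [torch.optim.Adam, torch.optim.SGD]
losses = [torch.nn.BCELoss]

results = {}
for lr, bs, opt_cls, loss_cls in itertools.product(learning_rates, batch_sizes,
                                                   optimisers, losses):
    # `tuner` is assumed to be an instance of the class defining run_trial.
    name, score = tuner.run_trial(lr, bs, opt_cls, loss_cls)
    results[name] = score

best = max(results, key=results.get)             # assumes higher is better
print(f"Best trial: {best} ({results[best]:.4f})")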
Example #7
    def send_message_to_client(self, client_socket, message):
        client_socket.send(bytes(message, "utf-8"))

    def receive_message(self, client_socket):
        try:
            msg = client_socket.recv(HEADER_LENGTH)
            if len(msg) > 0:
                message = msg.decode("utf-8")
                LOGGER.write_to_logger(message)
                print(message)
                message_length = int(message.strip())
                return {
                    "header": msg,
                    "data": client_socket.recv(message_length).decode("utf-8")
                }
            else:
                print("No message found")
                LOGGER.write_to_logger("No message found")
                return False
        except Exception as error:
            LOGGER.write_log_error(error, str(error))
            return False


if __name__ == "__main__":
    __get_root_project(number_of_descent=4)
    from src.examples.sockets.Parameters import HOST_SERVER, SERVER_PORT, ERROR_LOGGER_SERVER_PATH, LOGGER_SERVER_PATH,\
        HEADER_LENGTH
    from src.utils.Logger import Logger
    LOGGER = Logger(writer_path=LOGGER_SERVER_PATH,
                    error_path=ERROR_LOGGER_SERVER_PATH)
    SERVER = Server()
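receive_message above expects every message to be preceded by a fixed-width header holding the payload length (note that send_message_to_client sends its payload without one). Below is a minimal send-side sketch of that framing; the helper name and the default width are assumptions, so use the project's HEADER_LENGTH in practice.

def send_with_header(sock, payload, header_length=10):
    """Hypothetical helper: prefix the UTF-8 payload with a fixed-width
    length header so receive_message() can parse it."""
    data = payload.encode("utf-8")
    header = f"{len(data):<{header_length}}"   # length left-aligned, space-padded
    sock.send(header.encode("utf-8") + data)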
Example #8
def save_all_songs_lyrics():
    """retrieves the lyrics for every song in the raw song csv and save them
    """
    if os.path.isfile(
            os.path.join(data.raw_dataset_path, 'idx_tracks_lyrics_raw.npy')):
        # The file stores [idx] of the last completed chunk; take the scalar.
        read_idx = int(np.load(os.path.join(data.raw_dataset_path,
                                            'idx_tracks_lyrics_raw.npy'),
                               allow_pickle=True)[0])
    else:
        read_idx = -1  # nothing completed yet, so chunk 0 is not skipped
    chunksize = 50000
    df_artists = pd.read_csv(os.path.join(data.raw_dataset_path,
                                          'artists.csv'),
                             sep='\t',
                             lineterminator='\r',
                             usecols=['arid', 'artist_name'])

    logger = Logger('save_all_lyrics_song', True)
    logger.write_line('Starting to search lyrics from row: {}'.format(
        (read_idx + 1) * chunksize))

    for idx, df_tracks in enumerate(
            pd.read_csv(os.path.join(data.raw_dataset_path, 'tracks.csv'),
                        chunksize=chunksize,
                        sep='\t',
                        lineterminator='\r',
                        usecols=['tid', 'arid', 'track_name'])):
        if idx <= read_idx:
            continue
        df = df_tracks.merge(df_artists, how='left')
        df = df.drop(['arid'], axis=1)
        lyrics = [None] * len(df)
        tid_rescaled = range(len(df))

        try_again = True
        while try_again:
            try:
                with PoolExecutor(max_workers=300) as executor:
                    for tid, l, _, _ in executor.map(
                            get_genius_info,
                            zip(tid_rescaled, df.track_name, df.artist_name)):
                        lyrics[tid] = l
                try_again = False
            except (requests.exceptions.RequestException, ValueError) as e:
                logger.write_line('Error caught!')
                logger.write_line(str(e))

        df_to_save = pd.DataFrame({'tid': df.tid.values, 'lyrics': lyrics})
        if idx == 0:
            df_to_save.to_csv(os.path.join(data.raw_dataset_path,
                                           'tracks_lyrics_raw.csv'),
                              index=False)
        else:
            df_to_save.to_csv(os.path.join(data.raw_dataset_path,
                                           'tracks_lyrics_raw.csv'),
                              mode='a',
                              header=False,
                              index=False)
        logger.write_line('Done with searching lyrics until row: {}'.format(
            idx * chunksize))
        np.save(
            os.path.join(data.raw_dataset_path, 'idx_tracks_lyrics_raw.npy'),
            [idx])
Example #9
        self.log.write("Client: {} CONNECTED".format(address))
        done = False
        while not done:
            try:
                req = split_req(recv_req(sock, self.log))
                if req[0] in OperationType.list():
                    ServerManeger.init(sock, self.log)
                    ServerManeger.do_req(req, address)

                else:
                    log.write("Invalid request by {}".format(address))
            except OSError:
                done = True
                sock.close()
                self.clients.remove(sock)
                continue


    def run(self):
        log.write("Starting Server!")
        while True:
            self.accept()


if __name__ == '__main__':
    log = Logger(log_name="Server")
    server = MultiServer(log)

    server.run()
    server.s_s.close()
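OperationType.list() is not shown in the snippet; a minimal sketch of what such an enum helper commonly looks like, with invented member names purely for illustration:

from enum import Enum

class OperationType(Enum):
    # Hypothetical request codes; the real project defines its own members.
    LOGIN = "LOGIN"
    DOWNLOAD = "DOWNLOAD"
    UPLOAD = "UPLOAD"

    @classmethod
    def list(cls):
        """Return the member values so `req[0] in OperationType.list()` works."""
        return [member.value for member in cls]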
Example #10
                    LOGGER.write_to_logger(msg.decode("utf-8"))
                    print(msg.decode("utf-8"))
                    answered = True
                    option_selected = str(
                        input("Select an option to send to server:\n"))
                    self.send_message_to_server(message="Client message: " +
                                                option_selected)
                    #self.tcp_socket.close()
                else:
                    print("No message found")
                    LOGGER.write_to_logger("No message found")
                time.sleep(1)  # One second per attempt
                attemps += 1
                LOGGER.write_to_logger("Attempt number: " + str(attemps))

            except Exception as error:
                LOGGER.write_log_error(error, str(error))
                break

    def send_message_to_server(self, message):
        self.tcp_socket.send(bytes(message, "utf-8"))


if __name__ == "__main__":
    __get_root_project(number_of_descent=4)
    from src.examples.sockets.Parameters import SERVER_PORT, HOST_SERVER, PUBLIC_IP, ERROR_LOGGER_CLIENT_PATH, \
        LOGGER_CLIENT_PATH
    from src.utils.Logger import Logger
    LOGGER = Logger(writer_path=LOGGER_CLIENT_PATH,
                    error_path=ERROR_LOGGER_CLIENT_PATH)
    CLIENT = Client(port=SERVER_PORT, host=PUBLIC_IP)
Example #11
import socket

from src.network import ClientManeger
from src.ui.UIHandler import HorizonMusicApp
from src.utils.Logger import Logger
from src.utils.Constants import Network


class HorizonMusic:
    def __init__(self, logger):
        self.logger = logger
        self.app = HorizonMusicApp(self.logger)


if __name__ == "__main__":
    client = socket.socket()
    log = Logger(log_name="{}-log".format(str(socket.gethostname())))

    params = Network()

    try:
        client.connect((params.SERVER_IP, params.PORT))

    except ConnectionRefusedError as err:
        log.write(
            "Connection timeout! - You are using Horizon Music offline --> {}".
            format(err))
        params.IS_ONLINE = False

    finally:
        log.write("App Starting!")
        ClientManeger.init(client, log, params.IS_ONLINE)
Example #12
    def get_instance():
        """ Static access method. """
        if LoggerHandler.__instance is None:
            LoggerHandler.__instance = Logger()
        return LoggerHandler.__instance
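get_instance() is the classic lazy-singleton accessor; a minimal sketch of the LoggerHandler class it implies, where only the __instance attribute and the method body come from the snippet and the rest is assumed:

from src.utils.Logger import Logger   # import path follows the other examples

class LoggerHandler:
    """Lazily creates and caches a single shared Logger."""
    __instance = None

    @staticmethod
    def get_instance():
        """ Static access method. """
        if LoggerHandler.__instance is None:
            LoggerHandler.__instance = Logger()
        return LoggerHandler.__instance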