Example #1
    def __init__(self, filename):
        with open(filename) as f:
            data = f.read().splitlines()
        self.dim = int(data[0])
        self.popNum = int(data[1])
        self.cRate = float(data[5])
        self.tn_size = int(data[6])
        self.mRate = float(data[7])
        self.hasConst = util.str_to_bool(data[3])
        self.restrictions = int(data[4])
        self.isMin = util.str_to_bool(data[2])
        self.zeta = float(data[8])
        self.dirCoeff = float(data[9])
        self.generations = int(data[len(data) - 1])
        line = data[len(data) - 4]
        self.lowBound = line.split()
        self.lowBound = list(map(float, self.lowBound))
        line = data[len(data) - 3]
        self.uppBound = line.split()
        self.uppBound = list(map(float, self.uppBound))
        self.funcName = data[len(data) - 2]

        # GAME RELATED PARAMETERS
        self.allDRate = float(data[10])
        self.allCRate = float(data[11])
        self.TFTRate = float(data[12])
        self.numGames = int(data[13])
        self.numRounds = int(data[14])
        self.alpha = float(data[15])
        self.beta = float(data[16])
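The constructor above parses a purely positional config file. As a reading aid, the layout below reconstructs which line feeds which attribute; the field order comes directly from the data[...] indices read above, while the concrete values are invented for illustration.

# Hypothetical config layout matching the indices read by __init__ above.
# Values are illustrative only; the last four lines hold the bounds,
# the objective-function name and the generation count.
sample_config = "\n".join([
    "2",          # data[0]   -> dim
    "50",         # data[1]   -> popNum
    "true",       # data[2]   -> isMin
    "false",      # data[3]   -> hasConst
    "0",          # data[4]   -> restrictions
    "0.9",        # data[5]   -> cRate
    "3",          # data[6]   -> tn_size
    "0.05",       # data[7]   -> mRate
    "0.1",        # data[8]   -> zeta
    "1.0",        # data[9]   -> dirCoeff
    "0.25",       # data[10]  -> allDRate
    "0.25",       # data[11]  -> allCRate
    "0.5",        # data[12]  -> TFTRate
    "10",         # data[13]  -> numGames
    "5",          # data[14]  -> numRounds
    "1.0",        # data[15]  -> alpha
    "1.0",        # data[16]  -> beta
    "-5.0 -5.0",  # data[-4]  -> lowBound (whitespace-separated floats)
    "5.0 5.0",    # data[-3]  -> uppBound (whitespace-separated floats)
    "sphere",     # data[-2]  -> funcName
    "100",        # data[-1]  -> generations
])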
Example #3
def get_search_query():
    query = request.args.get("query", None)
    is_oa = request.args.get("is_oa", None)

    if is_oa is not None:
        try:
            is_oa = str_to_bool(is_oa)
        except ValueError:
            if is_oa == 'null':
                is_oa = None
            else:
                abort_json(400, "is_oa must be 'true' or 'false'")

    if not query:
        abort_json(400, "query parameter is required")

    start_time = time()
    response = fulltext_search_title(query, is_oa)
    sorted_response = sorted(response, key=lambda k: k['score'], reverse=True)

    for api_response in sorted_response:
        doi = api_response['response']['doi']
        version_suffix = re.findall(ur'[./](v\d+)$', doi, re.IGNORECASE)

        if version_suffix:
            title = api_response['response']['title']
            title = u'{} ({})'.format(title, version_suffix[0].upper())
            api_response['response']['title'] = title

    elapsed_time = elapsed(start_time, 3)
    return jsonify({"results": sorted_response, "elapsed_seconds": elapsed_time})
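Most of the examples on this page funnel file or user input through a str_to_bool helper whose implementation is not shown. The following is a minimal sketch consistent with the call sites (returns a bool, raises ValueError on unrecognized input as relied on above, and accepts the 'y'/'n' answers used in Example #5); the real helper in each project may differ.

# Minimal sketch of a str_to_bool helper consistent with the call sites on
# this page; this is an assumption, not the actual util module of any project.
def str_to_bool(value):
    normalized = str(value).strip().lower()
    if normalized in ("true", "t", "yes", "y", "1"):
        return True
    if normalized in ("false", "f", "no", "n", "0"):
        return False
    # Example #3 relies on a ValueError for inputs such as 'null'.
    raise ValueError("cannot interpret %r as a boolean" % (value,))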
Example #4
File: encoder.py Project: PPinto22/PROMOS
    def __init__(self, keep_first=False, smooth_factor=0.8):
        super().__init__()
        self.keep_first = util.str_to_bool(keep_first)
        self.smooth_factor = float(smooth_factor)
        self.continuous = self.smooth_factor > 0
        self.length = 0
        self.frequencies = {}  # Map<Column, Map<Category, Frequency>>
Example #5
def interactive_game(n=500):
    human_name = input("Okay Human! what is your name? ")
    show_opponent_hand = util.str_to_bool(
        input("Do you want the AI's cards to be visible to you? (y/n)"))
    show_deck = util.str_to_bool(
        input(
            "Do you want the contents of the deck to be visible to you? (y/n)")
    )

    ai_name = "Monty Carlos"
    player_names = [human_name, ai_name]

    print("Okay, let's flip a coin to see who may begin the game")
    whose_turn_ = np.random.randint(2)
    print("Very well! ", player_names[whose_turn_], " may begin! \n")

    interactive_game = InteractiveKaribaGame(kariba_moismcts.Kariba(
        player_names=player_names, whose_turn_=whose_turn_),
                                             show_deck,
                                             show_opponent_hand,
                                             n=n)

    interactive_game.play_game()
Example #6
    def post(self):
        pagesize = util.str_to_int(flask.request.form.get("pagesize"),
                                   default=10,
                                   min_value=10,
                                   max_value=100)
        target = util.str_one_of(flask.request.form.get("target"), "blank",
                                 ["self", "blank"])
        suggest = util.str_to_bool(flask.request.form.get("suggest", "off"),
                                   True, "on", "off")

        flask.session["pagesize"] = pagesize
        flask.session["target"] = target
        flask.session["suggest"] = suggest

        return flask.redirect(flask.url_for("main"))
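Example #6 relies on three small form-parsing helpers (util.str_to_int, util.str_one_of, util.str_to_bool) that are not shown. The sketch below matches the call sites above, but the signatures are inferred and are not the project's actual code.

# Hypothetical helpers matching the calls in Example #6; signatures are
# guessed from the call sites only.
def str_to_int(value, default=0, min_value=None, max_value=None):
    try:
        result = int(value)
    except (TypeError, ValueError):
        return default
    if min_value is not None and result < min_value:
        result = min_value
    if max_value is not None and result > max_value:
        result = max_value
    return result

def str_one_of(value, default, allowed):
    # Fall back to the default when the submitted value is not whitelisted.
    return value if value in allowed else default

def str_to_bool(value, default=False, true_value="true", false_value="false"):
    # Example #6 passes true_value="on" and false_value="off" for a checkbox.
    if value == true_value:
        return True
    if value == false_value:
        return False
    return default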
Example #7
def shapely_process(is_pickle, is_load, mask_img_dir, coord_save_dir,
                    coord_file_name):
    """

    :param is_pickle:
    :param is_load:
    :param mask_img_dir:
    :param coord_save_dir:
    :param coord_file_name:
    :return:
    """
    str_bool_dic = str_to_bool()

    is_pickle = str_bool_dic[is_pickle]
    is_load = str_bool_dic[is_load]

    if is_pickle:
        cell_nuclei_seg_store(mask_img_dir=mask_img_dir,
                              coord_save_dir=coord_save_dir,
                              coord_file_name=coord_file_name)

        if is_load:
            load_seg_coord(coord_save_dir=coord_save_dir,
                           coord_file_name=coord_file_name)
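Note that, unlike the other examples, Example #7 calls str_to_bool() with no arguments and then indexes the result, so here the helper evidently returns a lookup table rather than converting a single value. A hypothetical sketch of such a helper:

# Hypothetical dict-returning variant implied by Example #7; the project's
# actual helper is not shown.
def str_to_bool():
    return {"True": True, "true": True, "False": False, "false": False}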
Example #8
File: setupeisd.py Project: davas301/eisd
    def _check_params(self):
        """
        Check to make sure the input file has a valid parameter list
        :return: a boolean stating whether the input file is valid
        """

        # first check required inputs
        if self.keys_['PDBLIST'] is None:
            print "PDBLIST input is required and was not found"
            return False
        if self.keys_['SUB_SIZE'] is None:
            print "SUB_SIZE input is required and was not found"
            return False
        if self.keys_['SAVE_FILE'] is None:
            print "SAVE_FILE input is required and was not found"
            return False

        # now make sure things are the correct type:
        for bool_key in ['USE_JCOUP', 'USE_SHIFT', 'RUN_SHIFTX']:
            try:
                self.keys_[bool_key] = str_to_bool(self.keys_[bool_key])
            except ValueError:
                print "%s is not a valid value for %s. Must be a boolean" % (str(
                    self.keys_[bool_key]), bool_key)
                return False

        for int_key in ['SUB_SIZE', 'N_ITER', 'PRIOR_UNI_M']:
            try:
                self.keys_[int_key] = int(self.keys_[int_key])
            except ValueError:
                print "%s is not a valid value for %s. Must be integer" % (str(
                    self.keys_[int_key]), int_key)
                return False

        # Now make sure paths exist:
        if self.keys_['USE_JCOUP']:
            if not os.path.exists(self.keys_['JCOUP_PATH']):
                print "%s is not a valid path to a j coupling file" % \
                      self.keys_['JCOUP_PATH']
                return False

        if self.keys_['USE_SHIFT']:
            if not os.path.exists(self.keys_['SHIFT_PATH']):
                print "%s is not a valid path to a chemical shift file" % \
                      self.keys_['SHIFT_PATH']
                return False
            if self.keys_['RUN_SHIFTX']:
                if not os.path.exists(self.keys_['SHIFTX_EXE']):
                    print "%s is not a valid path to a SHIFTX2 executable" % \
                          self.keys_['SHIFTX_EXE']
                    return False

        # check cooling schedule parameters:
        default_cool_params = {
            'gaussian': [1, 2],
            'linear': [self.keys_['N_ITER'] + 1, 1]
        }
        cool_scheds = ['gaussian', 'linear']
        if self.keys_['COOL_SCHED'] not in cool_scheds:
            print "%s is not a valid cooling schedule. Please input " \
                  "'gaussian, or 'linear'"
            return False
        else:

            for cs in cool_scheds:
                if self.keys_['COOL_SCHED'] == cs:
                    if self.keys_['COOL_T0'] is None:
                        self.keys_['COOL_T0'] = default_cool_params[cs][0]
                    else:
                        try:
                            self.keys_['COOL_T0'] = float(self.keys_['COOL_T0'])
                        except ValueError:
                            print "%s is not a valid starting temperature. " \
                                  "Please input a float."
                            return False
                    if self.keys_['COOL_SCALE'] is None:
                        self.keys_['COOL_SCALE'] = default_cool_params[cs][1]
                    else:
                        try:
                            self.keys_['COOL_SCALE'] = float(
                                self.keys_['COOL_SCALE'])
                        except ValueError:
                            print "%s is not a valid cooling scale. " \
                                  "Please input a float."
                            return False

        priors = ['uniform']
        if self.keys_['PRIOR'] not in priors:
            print "%s is not a valid prior. Please input 'uniform' " \
                  "or ... thats it"
            return False
        return True
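For orientation, a self.keys_ mapping that would pass the checks above could look like the following; every key name comes from the code, while the values and file paths are invented and assume the referenced files exist.

# Hypothetical parameter set that would satisfy _check_params; values are
# illustrative only.
keys_ = {
    'PDBLIST': 'pdb_list.txt',
    'SUB_SIZE': '100',
    'SAVE_FILE': 'eisd_out.pkl',
    'USE_JCOUP': 'True',
    'JCOUP_PATH': 'jcoup.dat',
    'USE_SHIFT': 'False',
    'SHIFT_PATH': None,
    'RUN_SHIFTX': 'False',
    'SHIFTX_EXE': None,
    'N_ITER': '10000',
    'PRIOR_UNI_M': '10',
    'COOL_SCHED': 'gaussian',
    'COOL_T0': None,      # falls back to default_cool_params['gaussian'][0]
    'COOL_SCALE': None,   # falls back to default_cool_params['gaussian'][1]
    'PRIOR': 'uniform',
}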
Example #9
def compare_cpu_util(micro_config, micro_utilization, macro_config,
                     macro_utilization):

    services = micro_config.sections()
    for micro in services:
        if util.str_to_bool(micro_config.get(micro, 'auto_scale')):

            print_cpu_util(micro, micro_utilization, micro_config)

            high_cpu = check_high_cpu(micro, micro_config, micro_utilization)
            low_cpu = check_low_cpu(micro, micro_config, micro_utilization)

            if (not high_cpu) and (not low_cpu):

                print("Within CPU limit. Nothing to do for service: " + micro +
                      "!\n")

            elif high_cpu:

                # First get the macroservice running the service
                macro = plan.get_macroservice(micro)

                # Print the cpu info
                print_cpu_util(macro, macro_utilization, macro_config)

                start = datetime.now()
                result = parse_dockbeat(micro)
                stop = datetime.now()
                print("It took %s seconds to process dockbeat data!" % (str(
                    (stop - start).seconds)))
                #result = False
                if result == False:
                    # Check if Macro Autoscaling is set to True
                    if util.str_to_bool(macro_config.get(macro, 'auto_scale')):
                        # Then check whether the macroservice can handle the load to spin another microservice
                        if check_high_cpu(macro, macro_config,
                                          macro_utilization):
                            execute.scale_macroservice(
                                macro, int(macro_config.get(macro, 'up_step')))
                    else:
                        print("Autoscaling for this macro: " + str(macro) +
                              " is not set. Hence skipping...")

                    #Print time
                    st = datetime.fromtimestamp(
                        time.time()).strftime('%Y-%m-%d %H:%M:%S')
                    print("AUTOSCALING AT THIS TIME: " + st)
                    # Finally, scale up the microservice
                    execute.scale_microservice(
                        micro, int(micro_config.get(micro, 'up_step')))

                else:
                    print("Anomaly Detected! Skip Autoscaling!!")
            else:
                # Which means it's low low_cpu
                print("LOW CPU UTIL!")
                # First get the macroservice running the service
                macro = plan.get_macroservice(micro)

                # Print the cpu info
                print_cpu_util(macro, macro_utilization, macro_config)

                # Once the anomaly detection mechanism is finalized, use:
                # result = parse_dockbeat(micro)
                result = False

                if result == False:

                    #Print time
                    st = datetime.fromtimestamp(
                        time.time()).strftime('%Y-%m-%d %H:%M:%S')
                    print("DOWNSCALING AT THIS TIME: " + st)

                    # Scale down the microservice first, just to be on the safe side
                    execute.scale_microservice(
                        micro, int(micro_config.get(micro, 'down_step')))

                    # Check if Macro Autoscaling is set to True
                    if util.str_to_bool(macro_config.get(macro, 'auto_scale')):

                        # Then check whether the macroservice's cpu_util is too low (so we can remove one)
                        if check_low_cpu(macro, macro_config,
                                         macro_utilization):
                            execute.scale_macroservice(
                                macro,
                                int(macro_config.get(macro, 'down_step')))
                    else:
                        print("Autoscaling for this macro: " + str(macro) +
                              " is not set. Hence skipping...")
                else:
                    print("Anomaly Detected! Skip Autoscaling!!")

            print("\n-------------- EVALUATION COMPLETED FOR MICROSERVICE: " +
                  micro + " --------------\n")
Example #10
File: setupeisd.py Project: dhbrookes/eisd
    def _check_params(self):
        """
        Check to make sure the input file has a valid parameter list
        :return: a boolean stating whether the input file is valid
        """

        # first check required inputs
        if self.keys_['PDBLIST'] is None:
            print "PDBLIST input is required and was not found"
            return False
        if self.keys_['SUB_SIZE'] is None:
            print "SUB_SIZE input is required and was not found"
            return False
        if self.keys_['SAVE_FILE'] is None:
            print "SAVE_FILE input is required and was not found"
            return False

        # now make sure things are the correct type:
        for bool_key in ['USE_JCOUP', 'USE_SHIFT', 'RUN_SHIFTX']:
            try:
                self.keys_[bool_key] = str_to_bool(self.keys_[bool_key])
            except ValueError:
                print "%s is not a valid value for %s. Must be a boolean" % (
                    str(self.keys_[bool_key]), bool_key)
                return False

        for int_key in ['SUB_SIZE', 'N_ITER', 'PRIOR_UNI_M']:
            try:
                self.keys_[int_key] = int(self.keys_[int_key])
            except ValueError:
                print "%s is not a valid value for %s. Must be integer" % (str(
                    self.keys_[int_key]), int_key)
                return False

        # Now make sure paths exist:
        if self.keys_['USE_JCOUP']:
            if not os.path.exists(self.keys_['JCOUP_PATH']):
                print "%s is not a valid path to a j coupling file" % \
                      self.keys_['JCOUP_PATH']
                return False

        if self.keys_['USE_SHIFT']:
            if not os.path.exists(self.keys_['SHIFT_PATH']):
                print "%s is not a valid path to a chemical shift file" % \
                      self.keys_['SHIFT_PATH']
                return False
            if self.keys_['RUN_SHIFTX']:
                if not os.path.exists(self.keys_['SHIFTX_EXE']):
                    print "%s is not a valid path to a SHIFTX2 executable" % \
                          self.keys_['SHIFTX_EXE']
                    return False

        # check cooling schedule parameters:
        default_cool_params = {
            'gaussian': [1, 2],
            'linear': [self.keys_['N_ITER'] + 1, 1]
        }
        cool_scheds = ['gaussian', 'linear']
        if self.keys_['COOL_SCHED'] not in cool_scheds:
            print "%s is not a valid cooling schedule. Please input " \
                  "'gaussian, or 'linear'"
            return False
        else:

            for cs in cool_scheds:
                if self.keys_['COOL_SCHED'] == cs:
                    if self.keys_['COOL_T0'] is None:
                        self.keys_['COOL_T0'] = default_cool_params[cs][0]
                    else:
                        try:
                            self.keys_['COOL_T0'] = float(
                                self.keys_['COOL_T0'])
                        except ValueError:
                            print "%s is not a valid starting temperature. " \
                                  "Please input a float."
                            return False
                    if self.keys_['COOL_SCALE'] is None:
                        self.keys_['COOL_SCALE'] = default_cool_params[cs][1]
                    else:
                        try:
                            self.keys_['COOL_SCALE'] = float(
                                self.keys_['COOL_SCALE'])
                        except ValueError:
                            print "%s is not a valid cooling scale. " \
                                  "Please input a float."
                            return False

        priors = ['uniform']
        if self.keys_['PRIOR'] not in priors:
            print "%s is not a valid prior. Please input 'uniform' " \
                  "or ... thats it"
            return False
        return True
Example #11
def get_search_query(query):

    start_time = time()

    query = query.replace(u"_", u" ")
    if request.headers.getlist("X-Forwarded-For"):
        ip = request.headers.getlist("X-Forwarded-For")[0]
    else:
        ip = request.remote_addr

    if not request.args.get("automated", None):
        log_query(query, ip)

    no_live_calls = request.args.get("no-live-calls", "")
    nocache = request.args.get("nocache", "true")
    return_full_api_response = True
    if request.args.get("minimum", ""):
        return_full_api_response = False

    query_entities = get_entities_from_query(query)
    print "query_entities", query_entities
    getting_entity_lookup_elapsed = elapsed(start_time, 3)

    # page starts at 1 not 0
    page = 1
    try:
        page = int(request.args.get("page"))
    except:
        pass

    if page > 10:
        abort_json(
            400,
            u"Page too large. API currently only supports 10 pages right now.")

    if request.args.get("pagesize"):
        pagesize = int(request.args.get("pagesize"))
    else:
        pagesize = 10
    if pagesize > 100:
        abort_json(400, u"pagesize too large; max 100")

    try:
        oa_only = str_to_bool(request.args.get("oa", "false"))
    except:
        oa_only = False

    if nocache:
        print u"skipping cache"
    else:
        if query_entities and len(query_entities) == 1 and page == 1:
            cached_response = get_cached_api_response(query_entities[0],
                                                      oa_only)
            if cached_response and cached_response[0]:
                (api_response, collected_date) = cached_response
                total_time = elapsed(start_time, 3)
                api_response["_cached_on"] = collected_date.isoformat()
                api_response["_timing"] = {"total": total_time}
                print "got response!!!"
                return jsonify(api_response)

    (pubs_to_sort, time_to_pmids_elapsed,
     time_for_pubs_elapsed) = fulltext_search_title(
         query, query_entities, oa_only, full=return_full_api_response)

    initializing_publist_start_time = time()
    # sorted_pubs = sorted(pubs_to_sort, key=lambda k: k.adjusted_score, reverse=True)
    # selected_pubs = sorted_pubs[(pagesize * (page-1)):(pagesize * page)]
    # selected_pmids = [p.pmid for p in selected_pubs]

    sorted_pubs = sorted(pubs_to_sort,
                         key=lambda k: k["adjusted_score"],
                         reverse=True)
    sorted_pubs = [p for p in sorted_pubs]
    selected_pubs = sorted_pubs[(pagesize * (page - 1)):(pagesize * page)]

    selected_dois = [p["doi"] for p in selected_pubs]
    print selected_dois

    selected_pubs_full = []
    if selected_dois:
        selected_pubs_full += db.session.query(PubDoi).filter(
            PubDoi.doi.in_(selected_dois)).options(
                orm.undefer_group('full')).all()

    selected_pubs_full = [p for p in selected_pubs_full
                          if not p.suppress]  # get rid of retracted ones
    for my_pub in selected_pubs_full:
        my_pub.adjusted_score = [
            p["adjusted_score"] for p in sorted_pubs
            if p["doi"] == my_pub.display_doi
        ][0]

    my_pub_list = PubList(pubs=selected_pubs_full)
    initializing_publist_elapsed = elapsed(initializing_publist_start_time, 3)

    set_dandelions_start_time = time()
    if not no_live_calls:
        my_pub_list.set_dandelions()
    set_dandelions_elapsed = elapsed(set_dandelions_start_time)
    set_pictures_start_time = time()
    my_pub_list.set_pictures()
    set_pictures_elapsed = elapsed(set_pictures_start_time)

    to_dict_start_time = time()
    results = my_pub_list.to_dict_serp_list(full=return_full_api_response)

    response = {
        "results": results,
        "page": page,
        "oa_only": oa_only,
        "total_num_pubs": min(100, len(pubs_to_sort)),
        "query_entities": query_entities
    }
    if return_full_api_response:
        response["annotations"] = my_pub_list.to_dict_annotation_metadata()

    to_dict_elapsed = elapsed(to_dict_start_time, 3)
    total_time = elapsed(start_time, 3)

    response["_timing"] = {
        "9 total": total_time,
        "1 getting_entity_lookup_elapsed": getting_entity_lookup_elapsed,
        "2 identify_pmids_for_top_100": time_to_pmids_elapsed,
        "3 loading_top_100_data_for_sorting": time_for_pubs_elapsed,
        "4 loading_final_10_full_pubs": initializing_publist_elapsed,
        "5 set_dandelions_elapsed": set_dandelions_elapsed,
        "6 set_pictures_elapsed": set_pictures_elapsed,
        "7 to_dict_elapsed": to_dict_elapsed,
    }

    print u"finished query for {}: took {} seconds".format(
        query, elapsed(start_time))
    return jsonify(response)
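The route decorator for Example #11 is not included in the snippet, so the exact URL is unknown; a request of roughly the following shape would exercise the query-string parameters parsed above.

# Hypothetical request; the route path is a placeholder, only the parameter
# names come from the handler above.
# GET /search/<query>?page=2&pagesize=20&oa=true&minimum=1&no-live-calls=1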
Example #12
def updatestation():
    ret_data = dict()
    try:
        if request.method == 'POST':
            userlevel = User.get_fromid(current_user.get_id()).level
            if (int(userlevel) == int(User.ADMINLEVEL)):

                sitename = request.form['sitename']
                shadowName = request.form['shadowName']

                site = ChargingSite.get_site(sitename)

                if (site == None):
                    ret_data['result'] = False
                    ret_data['reason'] = 'sitename is invalid'
                else:
                    message = ''
                    message += 'update station information, site:'
                    message += sitename

                    message += ', station:'
                    message += shadowName

                    if ('serialNumber' in request.form):
                        serialNumber = request.form['serialNumber']
                        message += ', SerialNumber:'
                        message += serialNumber
                    else:
                        serialNumber = None

                    if ('charge_max' in request.form):
                        charge_max = int(request.form['charge_max'])
                        message += ', charge_max:'
                        message += str(charge_max)
                    else:
                        charge_max = None

                    if ('activate' in request.form):
                        activate = util.str_to_bool(request.form['activate'])
                        message += ', activate:'
                        message += request.form['activate']
                    else:
                        activate = None

                    if ('present_power' in request.form):
                        present_power = int(request.form['present_power'])
                        message += ', present_power:'
                        message += str(present_power)
                    else:
                        present_power = None

                    if (site.update_station(shadowName, serialNumber,
                                            charge_max, present_power,
                                            activate)):
                        ret_data['result'] = True
                        LogController.addEventLogging(
                            userid=current_user.get_id(), detail=message)
                    else:
                        ret_data['result'] = False
                        ret_data[
                            'reason'] = 'station is invalid or database error'
            else:
                ret_data['result'] = False
                ret_data['reason'] = 'user level is restricted'
        else:
            ret_data['result'] = False
    except:
        print('error')

    print(json.dumps(ret_data))
    return jsonify(ret_data)
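The handler in Example #12 reads its inputs from request.form. A hypothetical payload built from the field names it looks up (the values are invented) would be:

# Hypothetical form fields for updatestation; names come from the
# request.form lookups above, values are made up.
form_data = {
    "sitename": "main-garage",
    "shadowName": "station-01",
    "serialNumber": "SN-0001",
    "charge_max": "32",
    "activate": "true",
    "present_power": "7",
}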
Example #13
def train_model(model_name,
                model,
                row_start=None,
                row_end=None,
                step=None,
                initial_epoch=0,
                end_epoch=1,
                time_limit=None):
    if initial_epoch >= end_epoch:
        logging.error('initial_epoch(%d) >= end_epoch(%d).', initial_epoch,
                      end_epoch)
        return None
    if 'batch_size' not in config:
        config['batch_size'] = default_batch_size
    if 'does_shuffle' not in config:
        config['does_shuffle'] = default_does_shuffle
    if 'callbacks' not in config:
        config['callbacks'] = default_callbacks
    if 'monitored_loss_name' not in config:
        config['monitored_loss_name'] = default_monitored_loss_name
    if 'max_queue_size' not in config:
        config['max_queue_size'] = default_max_queue_size
    if 'does_use_multiprocessing' not in config:
        config['does_use_multiprocessing'] = default_does_use_multiprocessing
    if 'worker_number' not in config:
        config['worker_number'] = default_worker_number
    if 'verbose' not in config:
        config['verbose'] = default_verbose
    callbacks = list() if config['callbacks'] is not None else None
    if callbacks is not None:
        for cb in config['callbacks']:
            if isinstance(cb, keras.callbacks.Callback):
                if isinstance(cb, TimeLimiter) and time_limit is not None:
                    logging.warning(
                        'train_model: parameter time_limit is not None, ignored TimeLimiter in config.'
                    )
                    continue
                callbacks.append(cb)
            elif isinstance(cb, str):
                cb_str = cb.lower()
                cb_str = re.sub(pattern=_remove_pattern,
                                repl='',
                                string=cb_str)
                sep_idx = cb_str.find(':')
                cb_params = dict()
                if sep_idx >= 0:
                    cb_name = cb_str[:sep_idx]
                    cb_params_strs = cb_str[sep_idx + 1:].split(',')
                    for cb_param_str in cb_params_strs:
                        eq_idx = cb_param_str.find('=')
                        if eq_idx >= 0:
                            cb_params[
                                cb_param_str[:eq_idx]] = cb_param_str[eq_idx +
                                                                      1:]
                        else:
                            cb_params[cb_param_str] = '1'
                else:
                    cb_name = cb_str
                if cb_name == 'earlystopping':
                    es_monitor = config[
                        'monitored_loss_name'] if 'monitor' not in cb_params else cb_params[
                            'monitor']
                    if 'baseline' not in cb_params:
                        _, es_baseline = load_best_info(
                            model_name=model_name, monitor_name=es_monitor)
                    else:
                        es_baseline = float(cb_params['baseline'])
                    callbacks.append(
                        keras.callbacks.EarlyStopping(
                            monitor=es_monitor,
                            min_delta=EPSILON if 'min_delta' not in cb_params
                            else float(cb_params['min_delta']),
                            patience=2 if 'patience' not in cb_params else int(
                                cb_params['patience']),
                            verbose=1 if 'verbose' not in cb_params else int(
                                cb_params['verbose']),
                            mode='min'
                            if 'mode' not in cb_params else cb_params['mode'],
                            baseline=es_baseline,
                        ))
                elif cb_name == 'tensorboard':
                    callbacks.append(
                        keras.callbacks.TensorBoard(
                            log_dir=os.path.join(LOG_DIRECTORY, model_name)
                            if 'log_dir' not in cb_params else
                            cb_params['log_dir'],
                            batch_size=config['batch_size'],
                            write_graph=True if 'write_graph' not in cb_params
                            else str_to_bool(cb_params['write_graph']),
                        ))
                elif cb_name == 'modelsaver':
                    callbacks.append(
                        ModelSaver(
                            model_name=model_name,
                            period=1 if 'period' not in cb_params else int(
                                cb_params['period']),
                            verbose=1 if 'verbose' not in cb_params else int(
                                cb_params['verbose']),
                        ))
                elif cb_name == 'epochnumbersaver':
                    callbacks.append(
                        EpochNumberSaver(
                            model_name=model_name,
                            verbose=1 if 'verbose' not in cb_params else int(
                                cb_params['verbose']),
                        ))
                elif cb_name == 'bestinfosaver':
                    bi_monitor = config[
                        'monitored_loss_name'] if 'monitor' not in cb_params else cb_params[
                            'monitor']
                    if 'baseline' not in cb_params:
                        _, bi_baseline = load_best_info(
                            model_name=model_name, monitor_name=bi_monitor)
                    else:
                        bi_baseline = float(cb_params['baseline'])
                    callbacks.append(
                        BestInfoSaver(
                            model_name=model_name,
                            monitor=bi_monitor,
                            mode='min'
                            if 'mode' not in cb_params else cb_params['mode'],
                            baseline=bi_baseline,
                            verbose=1 if 'verbose' not in cb_params else int(
                                cb_params['verbose']),
                        ))
                elif cb_name == 'timelimiter':
                    if time_limit is not None:
                        logging.warning(
                            'train_model: parameter time_limit is not None, ignored TimeLimiter in config.'
                        )
                        continue
                    if 'limit' not in cb_params:
                        raise ValueError(
                            "TimeLimiter's parameter limit is missed.")
                    callbacks.append(
                        TimeLimiter(
                            limit=cb_params['limit'],
                            verbose=1 if 'verbose' not in cb_params else int(
                                cb_params['verbose']),
                        ))
                else:
                    raise UnknownCallbackNameException(cb)
            else:
                raise TypeError(
                    'Callback must be an instance of keras.callbacks.Callback or a callback name(string).'
                )
    if time_limit is not None:
        callbacks.append(TimeLimiter(limit=time_limit, verbose=1))
    rolling_window_size = get_rolling_window_size(model_name)
    generator = SquareExDataGenerator(
        dataset_name=DATASET_NAME_TRAIN,
        rolling_window_size=rolling_window_size,
        row_start=row_start,
        row_end=row_end,
        step=step,
        max_batch_size=config['batch_size'],
        does_shuffle=config['does_shuffle'],
    )
    history = model.fit_generator(
        generator=generator,
        epochs=end_epoch,
        verbose=config['verbose'],
        callbacks=callbacks,
        max_queue_size=config['max_queue_size'],
        use_multiprocessing=config['does_use_multiprocessing'],
        workers=config['worker_number'],
        initial_epoch=initial_epoch,
    )
    return history
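The callback entries parsed in Example #13 are plain strings of the form '<name>: key=value, key=value'; whitespace is presumably stripped by the unshown _remove_pattern, and bare keys default to '1'. A hypothetical config block illustrating the accepted callback names (the parameter values are illustrative only):

# Hypothetical callback configuration accepted by the parser above.
config['callbacks'] = [
    'earlystopping: patience=3, min_delta=0.0001',
    'tensorboard: write_graph=true',
    'modelsaver: period=1',
    'epochnumbersaver',
    'bestinfosaver: monitor=val_loss',
    'timelimiter: limit=3600',
]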