Example #1
 def __init__(self):
     super().__init__()
     self.config = ConfigHelper()
     self.model = self.config.get_broker_config(type(self).__name__.upper())
     self.main_words = [
         "acciones", "fondos", "cedears", "bonos", "total var. $"
     ]
Example #2
def process(hive_cmd_env):
    # 1.1 load model selectors and models' conf
    model_confs = ConfigHelper.load_config(
        os.path.join(os.path.dirname(__file__), 'conf', 'model_selectors_and_models.yaml'))
    model_selector = model_confs.get('model_selector')
    models = model_confs.get('models')
    # 1.2 load recmd requirement's conf
    requirements = ConfigHelper.load_config(os.path.join(os.path.dirname(__file__), 'conf', 'recmd_requirement.yaml'))
    # 1.3 load recmd score items
    score_params = ConfigHelper.load_config(os.path.join(os.path.dirname(__file__), 'conf', 'score_params.yaml'))

    # 2 get model key by running the model selector
    if model_selector:
        recmd_model_key = get_recmd_model_key_by_running_model_selector(requirements, model_selector)
    else:
        recmd_model_key = 'default'

    # 3 get model by recmd key and run model
    if models:
        recmd_result = get_recmd_result_by_running_models(hive_cmd_env, requirements, score_params,
                                                          models, recmd_model_key)
    else:
        raise RuntimeError('no models found in model_selectors_and_models.yaml')

    return recmd_result
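
Examples #2, #16, #17, #19 and #20 all call ConfigHelper.load_config(path) and treat the result as a plain dict. The project's implementation is not shown here; a minimal sketch of such a loader, assuming PyYAML and a static method, could look like this:

import yaml


class ConfigHelper:
    @staticmethod
    def load_config(path):
        # Parse a YAML file and return its contents as a dict
        # (an empty file yields an empty dict).
        with open(path, 'r', encoding='utf-8') as f:
            return yaml.safe_load(f) or {}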
Example #3
def main():
    data_dir = join(dirname(dirname(abspath(__file__))), 'train', 'data',
                    'normal')
    # generate src/tgt tables
    src_vocab_set, tgt_vocab_set = set(), set()
    with open(join(data_dir, 'train.en'), 'r') as f:
        for line in f:
            words = line.split()
            for word in words:
                src_vocab_set.add(word)
    with open(join(data_dir, 'train.fr'), 'r') as f:
        for line in f:
            words = line.split()
            for word in words:
                tgt_vocab_set.add(word)

    # write to config
    cfg = ConfigHelper()
    cfg.set_value('Others', 'normal_src_vocab_size', len(src_vocab_set) + 3)
    cfg.set_value('Others', 'normal_tgt_vocab_size', len(tgt_vocab_set) + 3)
    print('normal_src_vocab_size: {}'.format(len(src_vocab_set) + 3))
    print('normal_tgt_vocab_size: {}'.format(len(tgt_vocab_set) + 3))

    # write to src/tgt vocab file
    with open(join(data_dir, 'vocab.en'), 'w') as fw:
        write_header(fw)
        for vocab in src_vocab_set:
            fw.write('{}\n'.format(vocab))
    with open(join(data_dir, 'vocab.fr'), 'w') as fw:
        write_header(fw)
        for vocab in tgt_vocab_set:
            fw.write('{}\n'.format(vocab))
    print('generate normal vocab files successfully!')
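
write_header is not shown in Example #3, but the '+ 3' added to both vocab sizes suggests it reserves three special-token lines at the top of each vocab file. A hedged guess at such a helper, using common NMT placeholder tokens (the exact symbols are assumptions, not taken from the project):

def write_header(fw):
    # Reserve the first three vocabulary ids for special tokens;
    # the concrete symbols are assumed, not taken from the project.
    for token in ('<unk>', '<s>', '</s>'):
        fw.write('{}\n'.format(token))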
Example #4
def create_database():
    if not os.path.exists('./database.db'):
        print("Create a database\n")
        database.create_all()
        sleep(0.1)

    file_name = "products_79936.json"
    if not os.path.exists(file_name):
        # Download of mock database
        Process.run(
            'curl https://servicespub.prod.api.aws.grupokabum.com.br/descricao/v1/descricao/produto/79936 >> %s'
            % file_name)

    ## Save database ##
    # Read $filename
    config_file = './%s' % file_name
    config_helper = ConfigHelper(config_file)
    config = config_helper.load_config()

    # Read only products of config
    config = config['familia']['produtos']
    for data in config:
        product = ProductModel(**data)
        try:
            # Save products in database
            product.save_product()
            sleep(0.01)
        except Exception:
            print({"message": "An error occurred trying to create product."},
                  500)  # Internal Server Error
Example #5
def get_connection_info():
    ConnectionInfo = namedtuple("ConnectionInfo",
                                "dbhost, dbport, dbuser, dbpass, dbname")
    config = ConfigHelper()
    return ConnectionInfo(dbhost=config.get_config("DB_HOST"),
                          dbport=config.get_config("DB_PORT"),
                          dbuser=config.get_config("DB_USER"),
                          dbpass=config.get_config("DB_PASS"),
                          dbname=config.get_config("DB_NAME"))
Example #6
 def test_get_recmd_model_key(self):
     requirement = ConfigHelper.load_config(
         'E:\\PythonProject\\Work\\work_3\\dake_recommendation\\conf\\recmd_requirement.yaml'
     )
     model_selector = ConfigHelper.load_config(
         'E:\\PythonProject\\Work\\work_3\\dake_recommendation\\conf\\model_selectors_and_models.yaml'
     ).get('model_selector')
     results = ['md5_model_0001', 'md5_model_0002']
     self._list_in(get_recmd_model_key_by_running_model_selector(requirement, model_selector), results)
Example #7
class BullMarkets(ChromeHelper):
    def __init__(self):
        super().__init__()
        self.config = ConfigHelper()
        self.model = self.config.get_broker_config(type(self).__name__.upper())
        self.main_words = [
            "acciones", "fondos", "cedears", "bonos", "total var. $"
        ]

    def login(self, login_model: LoginModel, url: str):
        """

        :param login_model:
        :param url:
        :return:
        """

        config = self.config.get_all_config("BULLMARKETS")

        self.browser.get(url)
        self.browser.execute_script(config["script_modal"])

        time.sleep(5)
        username = self.browser.find_element_by_id(
            login_model.username_element)
        password = self.browser.find_element_by_id(
            login_model.password_element)

        username.send_keys(login_model.username)
        password.send_keys(login_model.password)
        time.sleep(5)
        self.browser.execute_script(config["script_login"])

    def get_data(self):
        """

        :return:
        """
        self.login(self.model, "https://bullmarketbrokers.com/")
        time.sleep(10)
        rows = self.get_rows_table("tbody_accountBrief")
        response = {}
        main_words_word = ''
        for row in rows:
            tds = [x.text for x in row.find_elements_by_tag_name("td") if x.text]
            if tds[0].lower() in self.main_words:
                main_words_word = tds[0]
                response[main_words_word] = {}
                response["total"] = tds[1:]
                continue
            response[main_words_word][tds[0]] = tds[1:]
        return response
Example #8
 def test_get_value(self, mock_path_exists):
     mock_path_exists.return_value = True
     read_data = """{"key":"value"}"""
     mo = mock_open(read_data=read_data)
     with patch('utils.config_helper.open', mo):
         config = ConfigHelper()
         self.assertEqual(
             config.get_config("key"), "value",
             """Is is not finding the correct
                                     key-value config""")
Example #9
 def get_registration_info(self):
     file = 'config.xml'
     config_helper = ConfigHelper(file)
     config = ConfigData()
     ret = config_helper.init_root()
     if ret:
         config_helper.parse(config)
         self.txt_registration_info.setText(config._qds_id)
     else:
         QMessageBox.information(self, 'Notice', 'Error in config file config.xml')
Example #10
 def get_authorize(self):
     file = 'config.xml'
     config_helper = ConfigHelper(file)
     config = ConfigData()
     ret = config_helper.init_root()
     if ret:
         config_helper.parse(config)
         self.txt_access_key.setText(config._access_key)
         self.txt_secret_key.setText(config._secret_key)
     else:
         QMessageBox.information(self, 'Notice', 'Error in config file config.xml')
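
Examples #9 and #10, and the later Examples #14, #18, #22, #23 and #26, use a second, XML-backed variant of the helper: ConfigHelper(file) together with a ConfigData value object, where init_root() reports whether config.xml could be parsed, parse() fills the ConfigData fields, and save() writes them back. The real classes are not shown; a rough sketch of that contract, assuming xml.etree.ElementTree and a flat <config> layout with one child element per field:

import xml.etree.ElementTree as ET


class ConfigData:
    """Plain value object; parse() fills underscore-prefixed fields such as
    _qds_id, _access_key, _secret_key, _period, _ema_fast, ..."""


class ConfigHelper:
    def __init__(self, file):
        self.file = file
        self.root = None

    def init_root(self):
        # True when the XML file exists and parses cleanly, False otherwise.
        try:
            self.root = ET.parse(self.file).getroot()
            return True
        except (OSError, ET.ParseError):
            return False

    def parse(self, config):
        # Copy each child element's text into the matching ConfigData field.
        for child in self.root:
            setattr(config, '_' + child.tag, child.text or '')

    def save(self, config):
        # Write the ConfigData fields back as child elements of <config>.
        root = ET.Element('config')
        for name, value in vars(config).items():
            ET.SubElement(root, name.lstrip('_')).text = str(value)
        ET.ElementTree(root).write(self.file, encoding='utf-8')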
Example #11
    def __init__(self):
        config = ConfigHelper()
        self.bucket = config.get_config("GCP_BUCKET")
        self.account = config.get_config("GOOGLE_APPLICATION_CREDENTIALS")

        # all these values are necessary
        if not self.bucket or not self.account:
            raise Exception("""The GCP_BUCKET or GOOGLE_APPLICATION_CREDENTIALS
            are not set and there is not a .dspreview.json file in the user's
            home folder, please provide one of them.""")

        # export the credentials path so the Google client libraries pick it up
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.account
        self._service = None
Example #12
 def test_with_invalid_file(self, mock_path_exists):
     mock_path_exists.return_value = True
     read_data = "{..."
     mo = mock_open(read_data=read_data)
     with patch('utils.config_helper.open', mo):
         with self.assertRaises(Exception):
             ConfigHelper()
Example #13
 def test_rescore(self):
     col_name = ['id', 'name', 'age', 'money']
     recmd_results_without_score = pd.read_csv(
         'E:\\PythonProject\\Work\\work_3\\dake_recommendation\\test\\test_1.txt', encoding='utf-8', names=col_name,
         sep=' ')
     score_params = ConfigHelper.load_config('E:\\PythonProject\\Work\\work_3\\dake_recommendation\\conf\\tmp.yaml')
     model_params = {}
     print(rescore(recmd_results_without_score, score_params, model_params))
Example #14
    def set_params(self):
        period = self.cbx_period.currentText().strip()
        ema_fast = self.txt_ema_fast.toPlainText().strip()
        ema_slow = self.txt_ema_slow.toPlainText().strip()
        open_offset = self.txt_open_offset.toPlainText().strip()
        open_interval = self.txt_open_interval.toPlainText().strip()
        stop_earning_offset = self.txt_stop_earning_offset.toPlainText().strip()
        stop_loss_offset = self.txt_stop_loss_offset.toPlainText().strip()
        level_rate = self.txt_level_rate.toPlainText().strip()
        max_number = self.txt_max_num.toPlainText().strip()

        file = 'config.xml'
        config_helper = ConfigHelper(file)
        config_to_save = ConfigData()
        ret = config_helper.init_root()
        if ret:
            config_helper.parse(config_to_save)
            config_to_save._period = period
            config_to_save._ema_fast = ema_fast
            config_to_save._ema_slow = ema_slow
            config_to_save._open_offset = open_offset
            config_to_save._open_interval = open_interval
            config_to_save._stop_earning_offset = stop_earning_offset
            config_to_save._level_rate = level_rate
            config_to_save._max_open_number = max_number
            config_helper.save(config_to_save)
        else:
            QMessageBox.information(self, 'Notice', 'Error in config file config.xml')
Example #15
 def test_with_valid_file(self, mock_path_exists):
     mock_path_exists.return_value = True
     read_data = """{"key":"value"}"""
     mo = mock_open(read_data=read_data)
     with patch('utils.config_helper.open', mo):
         try:
             ConfigHelper()
         except Exception:
             self.fail("""It shouldn't raise an exception since
             we are feeding a valid file""")
Example #16
def set_logging(log_folder_path, log_level):
    # set log file path
    log_config = ConfigHelper.load_config(os.path.join(os.path.dirname(__file__), 'conf', 'log.yaml'))
    log_file_name = 'log'
    if not os.path.exists(log_folder_path):
        os.makedirs(log_folder_path)
    log_file_path = os.path.join(log_folder_path, log_file_name)
    log_config['handlers']['file_handler']['filename'] = log_file_path
    log_config['handlers']['file_handler']['level'] = log_level
    logging.config.dictConfig(log_config)
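
set_logging assumes conf/log.yaml parses into a logging.config.dictConfig-compatible mapping that contains a handlers.file_handler entry. A minimal example of such a mapping, written as the Python dict load_config would return (the formatter and root-logger settings are illustrative, not the project's):

log_config = {
    'version': 1,
    'formatters': {
        'default': {'format': '%(asctime)s %(levelname)s %(name)s: %(message)s'},
    },
    'handlers': {
        'file_handler': {
            'class': 'logging.FileHandler',
            'formatter': 'default',
            'filename': 'log',   # overwritten by set_logging
            'level': 'INFO',     # overwritten by set_logging
        },
    },
    'root': {'handlers': ['file_handler'], 'level': 'INFO'},
}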
Example #17
def process(hive_cmd_env):
    # read conf 2: download from hive
    hive_calculate_and_download = ConfigHelper.load_config(
        os.path.join(os.path.dirname(__file__), 'conf', 'hive_calculate_and_download.yaml')
    )
    for hql_number, hql_detail in hive_calculate_and_download.items():
        # download files from hive after calculate
        download_file_from_hive(
            hive_cmd_env, hql_detail.get('hql'), hql_detail.get('hql_date_diff'),
            hql_detail.get('data_store_path'), hql_detail.get('update_mode')
        )
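
The loop in Example #17 (and again in Example #20) only relies on hive_calculate_and_download.yaml loading into a mapping of numbered entries, each carrying hql, hql_date_diff, data_store_path and update_mode keys. Loaded through ConfigHelper.load_config, one entry would come back as something like the dict below; every concrete value is made up for illustration:

hive_calculate_and_download = {
    'hql_001': {
        'hql': "SELECT user_id, item_id FROM dwd.user_item_clicks WHERE dt='${date}'",
        'hql_date_diff': -1,                          # run against yesterday's partition
        'data_store_path': '/data/recmd/user_item_clicks',
        'update_mode': 'overwrite',
    },
}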
Example #18
 def get_params(self):
     file = 'config.xml'
     config_helper = ConfigHelper(file)
     config = ConfigData()
     global g_config
     g_config = config
     ret = config_helper.init_root()
     if ret:
         config_helper.parse(config)
         self.cbx_period.setCurrentText(config._period)
         self.txt_ema_fast.setText(config._ema_fast)
         self.txt_ema_slow.setText(config._ema_slow)
         self.txt_open_offset.setText(config._open_offset)
         self.txt_open_interval.setText(config._open_interval)
         self.txt_stop_earning_offset.setText(config._stop_earning_offset)
         self.txt_stop_loss_offset.setText("0")
         self.txt_level_rate.setText(config._level_rate)
         self.txt_max_num.setText(config._max_open_number)
     else:
         QMessageBox.information(self, 'Notice', 'Error in config file config.xml')
Example #19
def set_logging(error_log_config_file_path, error_log_folder_path,
                error_log_level):
    if not os.path.exists(error_log_folder_path):
        os.makedirs(error_log_folder_path)

    # set log file path and level
    # log_config = ConfigHelper.load_config(os.path.join(os.path.dirname(__file__), "conf", "log.yaml"))
    log_config = ConfigHelper.load_config(error_log_config_file_path)
    log_config["handlers"]["file_handler"]["filename"] = os.path.join(
        error_log_folder_path, "logs")
    log_config["handlers"]["file_handler"]["level"] = error_log_level

    logging.config.dictConfig(log_config)
Example #20
def process(hive_cmd_env):
    # read conf 1: download from hive
    hive_calculate_and_download = ConfigHelper.load_config(
        os.path.join(os.path.dirname(__file__), 'conf', 'hive_calculate_and_download.yaml')
    )
    for hql_number, hql_detail in hive_calculate_and_download.items():
        # download files from hive after calculate
        download_file_from_hive(
            hive_cmd_env, hql_detail.get('hql'), hql_detail.get('data_store_path')
        )

    # read conf 2: wrap to json and upload
    wrap_to_json_and_upload_to_es = ConfigHelper.load_config(
        os.path.join(os.path.dirname(__file__), 'conf', 'wrap_to_json_and_upload_to_es.yaml')
    )
    for file_number, process_detail in wrap_to_json_and_upload_to_es.items():
        # wrap the downloaded files to json
        json_schema_path, json_file_path = wrap_to_json(
            process_detail.get('schema_path'), process_detail.get('data_path'),
            process_detail.get('index_name'), process_detail.get('type_name')
        )

        # and then upload them to Elasticsearch
        upload_to_es(process_detail.get('index_name'), json_schema_path, json_file_path)
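
Likewise, wrap_to_json_and_upload_to_es.yaml only needs to map file numbers to schema_path, data_path, index_name and type_name. An illustrative entry (paths and names invented):

wrap_to_json_and_upload_to_es = {
    'file_001': {
        'schema_path': 'conf/schemas/user_item_clicks_schema.json',
        'data_path': '/data/recmd/user_item_clicks',
        'index_name': 'recmd_user_item_clicks',
        'type_name': 'doc',
    },
}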
Example #21
def process(args_config_file_path):
    args_config = ConfigHelper.load_config(args_config_file_path)

    # 1. get training or testing mode:
    is_training = args_config["is_training"]
    # 2. get middle result log dir params
    middle_result_log_dir_params = args_config["middle_result_log_dir_params"]
    # 3. get model params:
    model_params = args_config["model_params"]
    # 4. get the running params used to train or test your model
    training_params = args_config["training_params"]
    testing_params = args_config["testing_params"]

    # step 1. get dirname pattern
    pattern, default_pattern = middle_result_log_dir_params[
        "pattern"], middle_result_log_dir_params["default_pattern"]
    if pattern is None or len(pattern) == 0:
        pattern = default_pattern

    # step 2. get and generate middle result log dir
    if is_training:
        # get middle result log dir automatically from config args under training mode
        tensorboard_dir, model_dir = genarate_middle_result_log_dir(
            args_config_file_path,
            middle_result_log_dir_params["tensorboard_dir"],
            middle_result_log_dir_params["model_dir"],
            middle_result_log_dir_params["note_dir"],
            middle_result_log_dir_params["self_increasing_mode"]
            and is_training, pattern)

        training_params.update({"tensorboard_dir": tensorboard_dir})
        training_params.update({"model_dir": model_dir})

    # step 3. todo: train or test your model
    if is_training:
        set_up_visiable_gpu(training_params.get("gpu_num"),
                            training_params.get("gpu_device_num"))

        model_handler = ModelHandler(model_params)
        model_handler.train(training_params)
    else:
        set_up_visiable_gpu(testing_params.get("gpu_num"),
                            testing_params.get("gpu_device_num"))

        model_handler = ModelHandler(model_params)
        model_handler.test(testing_params)
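
process() reads only a handful of keys from the args config, so the YAML is expected to load into a dict shaped roughly like the one below; all concrete values are illustrative, not taken from the project:

args_config = {
    'is_training': True,
    'middle_result_log_dir_params': {
        'pattern': '',                   # empty -> falls back to default_pattern
        'default_pattern': 'run_{}',
        'tensorboard_dir': 'middle_results/tensorboard',
        'model_dir': 'middle_results/models',
        'note_dir': 'middle_results/notes',
        'self_increasing_mode': True,
    },
    'model_params': {'hidden_size': 256},
    'training_params': {'gpu_num': 1, 'gpu_device_num': '0'},
    'testing_params': {'gpu_num': 1, 'gpu_device_num': '0'},
}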
Example #22
 def save_registration_info(self):
     registration_info = self.txt_registration_info.toPlainText().strip()
     if not registration_info:
         QMessageBox.information(self, 'Notice', 'Registration info is incomplete')
         return
     file = 'config.xml'
     config_helper = ConfigHelper(file)
     config_to_save = ConfigData()
     ret = config_helper.init_root()
     if ret:
         config_helper.parse(config_to_save)
         config_to_save._qds_id = registration_info
         config_helper.save(config_to_save)
         QMessageBox.information(self, 'Notice', 'Info saved; restart the software to take effect')
     else:
         QMessageBox.information(self, 'Warning', 'Error in config file config.xml')
Example #23
 def set_authorize(self):
     access_key = self.txt_access_key.toPlainText().strip()
     secret_key = self.txt_secret_key.toPlainText().strip()
     if not access_key or not secret_key:
         QMessageBox.information(self, 'Notice', 'Information is incomplete')
         return
     file = 'config.xml'
     config_helper = ConfigHelper(file)
     config_to_save = ConfigData()
     ret = config_helper.init_root()
     if ret:
         config_helper.parse(config_to_save)
         config_to_save._access_key = access_key
         config_to_save._secret_key = secret_key
         config_helper.save(config_to_save)
         QMessageBox.information(self, 'Notice', 'Info saved; restart the software to take effect')
     else:
         QMessageBox.information(self, 'Notice', 'Error in config file config.xml')
Example #24
 def __enter__(self):
     try:
         config = ConfigHelper()
         credentials = pika.PlainCredentials(config.get_config("MQ_USER"),
                                             config.get_config("MQ_PASS"))
         host = config.get_config("MQ_HOST")
         port = config.get_config("MQ_PORT")
         vhost = config.get_config("MQ_VHOST")
         self.queue = config.get_config("MQ_QUEUE")
         parameters = pika.ConnectionParameters(host=host,
                                                port=port,
                                                virtual_host=vhost,
                                                credentials=credentials)
         self.connection = pika.BlockingConnection(parameters)
         self.channel = self.connection.channel()
         self.channel.queue_declare(queue=self.queue, durable=True)
     except Exception as err:
         logger.exception(err)
         raise Exception("""It was not possible to connect to the MQ,
                         please check the connection information""")
     return self
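
Example #24 shows only the __enter__ half of a context manager around a pika connection. Assuming the class also defines a matching __exit__ that closes the connection, and with QueuePublisher as a stand-in for the real class name, it would be used roughly like this:

with QueuePublisher() as mq:
    # publish a persistent message to the queue declared in __enter__
    mq.channel.basic_publish(exchange='',
                             routing_key=mq.queue,
                             body='{"event": "example"}',
                             properties=pika.BasicProperties(delivery_mode=2))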
Example #25
from model import NMTModel
from helpers.misc_helper import create_hparams
from helpers.input_helper import get_infer_input, get_input
from helpers.vocab_helper import create_idx2vocab_tables, create_vocab2idx_tables
from helpers.evaluation_helper import compute_bleu

sys.path.append(dirname(dirname(abspath(__file__))))
from utils.config_helper import ConfigHelper

flags = tf.flags
logging = tf.logging
logging.set_verbosity(tf.logging.INFO)

CHECKPOINT_BASENAME = 'model.ckpt'

cfg = ConfigHelper()
hparams = create_hparams(cfg)


def _run_training():
    data_dir = join(dirname(abspath(__file__)), 'data')
    # vocab files
    src_vocab_file = join(data_dir, hparams.vocab_prefix + '.' + hparams.src)
    tgt_vocab_file = join(data_dir, hparams.vocab_prefix + '.' + hparams.tgt)
    src_vocab2idx_table, tgt_vocab2idx_table = create_vocab2idx_tables(
        src_vocab_file, tgt_vocab_file, hparams.share_vocab)
    src_idx2vocab_table, tgt_idx2vocab_table = create_idx2vocab_tables(
        src_vocab_file, tgt_vocab_file, hparams.share_vocab)
    # train files
    train_src_file = join(data_dir, hparams.train_prefix + '.' + hparams.src)
    train_tgt_file = join(data_dir, hparams.train_prefix + '.' + hparams.tgt)
Example #26
from utils.register import Register

logging.basicConfig(level=logging.DEBUG,  # log level printed to the console
                    filename='qds.log',
                    filemode='w',  # mode: 'w' or 'a'; 'w' rewrites the log on every run, overwriting the previous one,
                    # 'a' is append mode and is the default when filemode is not given
                    format='%(asctime)s : %(message)s'
                    # '%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'
                    # log format
                    )

URL = 'https://api.btcgateway.pro'


file = 'config.xml'
config_helper = ConfigHelper(file)
config = ConfigData()
ret = config_helper.init_root()
if ret:
    config_helper.parse(config)
else:
    logging.debug("Error, please check file {0}".format(file))
    sys.exit(-1)

ACCESS_KEY = config._access_key
SECRET_KEY = config._secret_key

dm = ReliableHuobiDM(URL, ACCESS_KEY, SECRET_KEY)

# 1min, 5min, 15min, 30min, 60min,4hour,1day, 1mon
period = config._period