Example #1
    def test_load_config_abnormal(self, os_mocked):
        """Test the `utils_config.load_config` function in its abnormal operation.

            Args:
                os_mocked: os built-in module mocked.

            """
        os_mocked.path.exists.return_value = False
        # Execute the method from the module.
        with self.assertRaises(ValueError):
            utils_config.load_config(self.test_config_file)
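For context, a minimal sketch of what `utils_config.load_config` might look like, inferred from the tests in examples #1 and #3: the existence check that raises ValueError and the patched `utils_config.open`. The SimpleNamespace wrapping (suggested by the attribute access `obtained.key1` in example #3) and the whole body are assumptions for illustration only.

import json
import os
from types import SimpleNamespace

def load_config(config_file):
    """Hypothetical sketch: validate the path, then parse the JSON config."""
    if not os.path.exists(config_file):
        raise ValueError("Config file not found: {}".format(config_file))
    with open(config_file) as handle:
        # SimpleNamespace gives attribute access (config.key1) to the parsed keys.
        return json.loads(handle.read(), object_hook=lambda d: SimpleNamespace(**d))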
Example #2
def main():
    logger = logging.getLogger(__name__)

    PATHS = {'LOCAL': os.getcwd(), 'HOME': os.path.expanduser("~")}

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)
    config["paths"] = PATHS

    folderpath = get_folderpath(config, '_config', PATHS)
    logger.info("Scanning folder: {}".format(folderpath))

    for f in scanfiles(folder=folderpath, filter=r"\.pickle$"):
        filepath = os.path.join(f.get('folder', ''), f.get('file'))
        data = get_pickle_data(pickleName=filepath)
        for k, v in data.items():
            if v:
                logger.info("\t{} => {} : {}".format(k, type(v), len(v)))
            else:
                logger.info("\t{}".format(k))

        content = data.get('content')
        if content:
            filename, _ = os.path.splitext(f.get('file'))
            filename = '{}.json'.format(filename)
            save_json(file_path=filename, data=content)
            logger.info("Export: {} => {}".format('content', filename))
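The helpers used above (`get_pickle_data`, `save_json`, and `save_pickle_data` used in later examples) are not shown in this collection. Minimal sketches, assuming they are thin wrappers around the standard `pickle` and `json` modules, with signatures inferred from the call sites:

import json
import os
import pickle

def get_pickle_data(pickleName):
    # Assumed behaviour: return the unpickled dict, or an empty dict if the file is missing.
    if not os.path.exists(pickleName):
        return {}
    with open(pickleName, 'rb') as handle:
        return pickle.load(handle)

def save_pickle_data(data, pickleName):
    # Assumed behaviour: overwrite the pickle file with the current data dict.
    with open(pickleName, 'wb') as handle:
        pickle.dump(data, handle)

def save_json(file_path, data):
    # Assumed behaviour: pretty-print the data as JSON; default=str copes with dates.
    with open(file_path, 'w') as handle:
        json.dump(data, handle, indent=4, default=str)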
Example #3
    def test_load_config(self, os_mocked):
        """Test the `utils_config.load_config` function in its normal operation.

        Args:
            os_mocked: os built-in module mocked.

        """

        os_mocked.path.exists.return_value = True
        # Execute the method from the module.
        with mock.patch(
                "utils_config.open",
                mock.mock_open(read_data=str(self.test_data).replace(
                    "'", '"'))) as mock_open:
            obtained = utils_config.load_config(self.test_config_file)
            self.assertEqual(self.test_config,
                             obtained,
                             msg="Expected: {}, Obtained: {}".format(
                                 self.test_config.key1, obtained.key1))
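Neither test shows its fixture. A sketch of the surrounding test class, assuming a class-level `mock.patch` supplies `os_mocked` to each test method; the class name and sample data are hypothetical.

import unittest
from types import SimpleNamespace
from unittest import mock

import utils_config

@mock.patch("utils_config.os")
class LoadConfigTest(unittest.TestCase):
    # The class-level patch passes the mocked os module to every test_* method.
    def setUp(self):
        self.test_config_file = "test_config.json"
        self.test_data = {"key1": "value1"}
        self.test_config = SimpleNamespace(**self.test_data)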
Example #4
def main():
    logger = logging.getLogger(__name__)

    PATHS = {
        'LOCAL': os.getcwd(),
        'HOME': os.path.expanduser("~") 
    }

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))

    backup_data(config, None, None, savepath)

    recover_json(config, PATHS)

    backup_data(config, None, None, savepath)
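Each script guards `logging.config.dictConfig` with a fallback to `basicConfig`. The actual `ktapi.json` contents are not shown; a minimal `logger` section that `dictConfig` would accept looks roughly like this (the keys below are an assumption):

config_example = {
    "logger": {
        "version": 1,  # required by dictConfig
        "formatters": {
            "simple": {"format": "%(asctime)s %(levelname)s %(message)s"}
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "simple"}
        },
        "root": {"level": "INFO", "handlers": ["console"]}
    }
}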
Example #5
def main():
    logger = logging.getLogger(__name__)

    PATHS = {
        'LOCAL': os.getcwd(),
        'HOME': os.path.expanduser("~") 
    }

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)
        logger.error("Unable to load the logging config, defaulting to the terminal!")
        
    config["paths"] = PATHS

    run_data = {
        "homepath": PATHS.get('HOME', os.path.expanduser("~")),
        "localpath": PATHS.get('LOCAL', os.getcwd()),
        "pid": os.getpid(),
        "date": datetime.now().strftime("%Y-%m-%d-%H-%M-%S"),
        "server": socket.gethostname(),
        "processes": []
    }

    logger.info("Timestamp: {}".format(run_data.get('date')))

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))

    config_type = config.get("configurations", {}).get("run", {}).get("kaptio")
    kaptio_config_file = get_configuration_path(config, config_type, config.get('paths', []))
    logger.info("\tLoading config: {}".format(kaptio_config_file))

    kaptio_config = load_kaptioconfig(kaptio_config_file)    
    baseurl = kaptio_config['api']['baseurl']
    
    run_data['baseurl'] = baseurl

    kt = KaptioClient(baseurl, kaptio_config['api']['auth']['key'], kaptio_config['api']['auth']['secret'])

    function_switch = {
        'save_data': save_data,
        'backup': backup_data,
        'export': export_pickle,
        'clear_data': clear_data,
        'partial': init_partial,
        'metadata': load_metadata,
        'tax_profiles': update_taxprofiles,
        'packages': process_packages,
        'marketing': get_marketingnames,
        'filter_packages': filter_packages,
        'augment_packages': augment_packages,
        'custom': promote_custom,
        'dates': process_dates,
        'prices': process_prices,
        'price_para': process_price_parallel,
        'augment_price': augment_pricelists,
        'errors': process_errors,
        'content': process_content,
        'items': process_items,
        'remove_pricedata': remove_pricedata,
        'allsell': process_pricedata,
        'bulkloader': process_bulkloader,
        'xml': process_xml
    }

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency
        config_type = config.get("configurations", {}).get("run", {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Loading pickle file {}".format(pickle_file))
        if 'presets' not in config:
            config['presets'] = {}

        config['presets']['pickle'] = pickle_file
        data = get_pickle_data(pickle_file)

        if len(data) > 0:
            logger.info("Data keys loaded...")
            for key, value in data.items():
                if value:
                    logger.info("\t{} => {} : {}".format(key, type(value), len(value)))
                else:
                    logger.info("\t{} : No Values".format(key))

        run_data['pickle'] = pickle_file

        for process in config.get('process', []):
            logger.info("Running: {}".format(process))
            run_data['processes'].append(process)
            #try:
            if function_switch.get(process):
                data = function_switch.get(process)(config, data, kt, savepath)
            else:
                logger.warning("No process defined for {}".format(process))
            #except Exception as ex:
            #    logger.error('=== ERROR: {} => {}\n\tSTOPPING!'.format(process, ex))
            #    break

        run_data['end'] = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

        if not data.get('_runs'):
            data['_runs'] = {}

        run_name = "{}-{}".format(run_data.get('server'), run_data.get('date'))
        data['_runs'][run_name] = run_data

        logger.info("Data keys loaded...")
        for key, value in data.items():
            if value:
                logger.info("\t{} => {} : {}".format(key, type(value), len(value)))
            else:
                logger.info("\t{} : No Values".format(key))

        save_pickle_data(data, pickle_file)
        try:
            save_json("kt_api_data.json", data)
        except Exception as ex:
            logger.info("Failed to save JSON file.\n\t{}".format(ex))
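The `function_switch` table implies one contract for every step: a callable that takes `(config, data, kt, savepath)` and returns the (possibly updated) data dict, which is then handed to the next step. A hypothetical handler and dispatch call, for illustration only:

def example_process(config, data, kt, savepath):
    # Hypothetical step: record that it ran and hand the data dict back.
    data.setdefault('_log', []).append('example_process ran')
    return data

function_switch['example'] = example_process   # register the hypothetical step
handler = function_switch.get('example')       # look a step up by its configured name
if handler:
    data = handler(config, data, kt, savepath)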
Example #6
def get_config_file(json_file):
    config = utils_config.load_config(json_file)
    return utils_config.serialize_config(config)
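A possible call site for the wrapper above; the file name is illustrative and `serialize_config` is assumed to return a plain, JSON-serializable view of the loaded config.

serialized = get_config_file("settings.json")
print(serialized)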
Example #7
                        i += 1
                        if i > max_len:
                            break
        else:
            logger.info("\t{} : No Values".format(key))

    save_json("{}.json".format(base_name), data)
    return data
    
if __name__ == "__main__":
    PATHS = {
        'LOCAL': os.getcwd(),
        'HOME': os.path.expanduser("~") 
    }

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)
    config["paths"] = PATHS

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency
        config_type = config.get("configurations", {}).get("run", {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
Example #8
def main():
    logger = logging.getLogger(__name__)

    PATHS = {'LOCAL': os.getcwd(), 'HOME': os.path.expanduser("~")}

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)
    config["paths"] = PATHS

    scan_local = config.get("flags", {}).get("switches",
                                             {}).get("scan_local", False)
    scan_remote = config.get("flags", {}).get("switches",
                                              {}).get("scan_remote", False)
    get_remote_content = config.get("flags",
                                    {}).get("switches",
                                            {}).get("import_remote", False)
    check_updates = config.get("flags",
                               {}).get("switches",
                                       {}).get("check_updates", False)

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    logger.info("Timestamp: {}".format(timestamp))

    logger.info(
        "Running:\n\tscan local\t{}\n\tscan remote\t{}\n\tget remote\t{}\n\tcheck updates\t{}"
        .format(scan_local, scan_remote, get_remote_content, check_updates))

    if scan_local:
        logger.info("Local Pickles:")
        for f in scanfiles('.', r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    if scan_remote:
        logger.info("Remote Pickles:")
        for f in scanfiles(os.path.join(savepath, 'config'), r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency
        config_type = config.get("configurations", {}).get("run",
                                                           {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Loading pickle file {}".format(pickle_file))
        if 'presets' not in config:
            config['presets'] = {}

        config['presets']['pickle'] = pickle_file
        data = get_pickle_data(pickle_file)

        if get_remote_content:
            config_type = config.get("configurations",
                                     {}).get("run", {}).get("remote_pickle")
            if not config_type:
                logger.error(
                    "\tUnable to locate remote pickle details {configurations->run->remote_pickle}"
                )
                break

            remote_pickle = get_configuration_path(config, config_type,
                                                   config.get('paths', []))
            logger.info("\tloading remote {}".format(remote_pickle))

            data_src = get_pickle_data(remote_pickle)

            if 'content' in data_src:
                logger.info("Fetching remote cached content")
                kt_content = data_src.get('content')

                if kt_content:
                    data['content'] = kt_content
                    logger.info("Retrieved remote cached content")

                    save_pickle_data(data, pickle_file)
        display_data(pickle_file=pickle_file, data=data, name="Local")

        if check_updates and get_remote_content:
            update_prices(data=data, data_src=data_src)
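`scanfiles` is used in examples #2 and #8 but not defined in this collection. A minimal sketch, assuming it walks a folder and yields `{'file', 'folder'}` dicts for names matching the regex filter; the signature is inferred from the call sites.

import os
import re

def scanfiles(folder='.', filter=".*"):
    # Assumed behaviour: yield one dict per matching file under the folder tree.
    matcher = re.compile(filter)
    for root, _, files in os.walk(folder):
        for name in files:
            if matcher.search(name):
                yield {'file': name, 'folder': root}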