def recover_json(config, paths):
    """Rebuild per-currency pickle files from their JSON counterparts.

    For each currency in config['presets']['currencies'], derives the
    matching ``<name>_<currency>.json`` / ``<name>_<currency><ext>`` pair
    from the configured run pickle path, loads the JSON (if present) and
    re-saves it as a pickle.

    Args:
        config: loaded application configuration dict; mutated in place
            (presets currency/pickle/json keys).
        paths: path lookup mapping passed to get_configuration_path().
    """
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # BUG FIX: the original guard was the no-op expression statement
    # ``config['presets']`` (KeyError when missing) and ran only after
    # the first write to config['presets'][...]. Guard up front instead.
    if 'presets' not in config:
        config['presets'] = {}

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency

        config_type = config.get("configurations", {}).get("run", {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, paths)

        name, ext = os.path.splitext(pickle_file)
        json_file = "{}_{}{}".format(name, currency, ".json")
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Files\n\tpickle: {}\n\tjson: {}".format(pickle_file, json_file))

        config['presets']['pickle'] = pickle_file
        config['presets']['json'] = json_file

        if os.path.exists(json_file):
            data = load_json(json_file)
            save_pickle_data(data, pickle_file)
        else:
            logger.error("missing path {}".format(json_file))
def main(processlist):
    """Run each requested process over the pickled file list, then persist it.

    Unknown process names are silently skipped; each handler receives the
    current data and returns the (possibly updated) data for the next one.
    """
    picklename = 'filelist.pickle'
    payload = get_pickle_data(picklename)
    handlers = buildprocessmap()

    for process_name in processlist:
        handler = handlers.get(process_name)
        if not handler:
            continue
        logger.info('Running {}'.format(process_name))
        payload = handler(payload)

    save_pickle_data(data=payload, pickleName=picklename)
# Scan the shared OneDrive data folder, index every file it contains into
# kaptio_dumps.pickle (grouped by file name), and log a per-folder count.
logger = logging.getLogger(__name__)

homepath = os.path.expanduser("~")
datapaths = ["OneDrive - Great Canadian Railtour Co", "Jupyter_NB"]
savepath = os.path.join(homepath, *datapaths)
# NOTE(review): filter_str is never passed to scanfiles() here — confirm
# whether the scan was meant to be restricted to JSON files.
filter_str = "[a-zA-Z_]*.json"
pickle_file = "kaptio_dumps.pickle"

data = get_pickle_data(pickle_file)
data['files'] = []
data['names'] = {}

for f in scanfiles(savepath):
    data['files'].append(f)
    # Group entries by file name so duplicates across folders are visible.
    data['names'].setdefault(f['file'], []).append(f)

logger.info("found {} files.".format(len(data['names'])))
save_pickle_data(data, pickle_file)

logger.info("scanning folder {}".format(savepath))
for dirpath, _, filenames in os.walk(savepath):
    # BUG FIX: logger.info(dirpath, len(filenames)) treated the count as a
    # %-format argument for a message with no placeholder, so it was dropped
    # (and would raise if dirpath contained a '%'). Use an explicit format.
    logger.info("%s: %d files", dirpath, len(filenames))
def main():
    """Entry point: load ktapi.json, build the Kaptio API client, and run
    the configured process pipeline once per preset currency.

    Side effects: reads ktapi.json and the Kaptio config file, loads/saves
    per-currency pickle files, writes kt_api_data.json, and mutates
    config['presets'].
    """
    logger = logging.getLogger(__name__)

    PATHS = {
        'LOCAL': os.getcwd(),
        'HOME': os.path.expanduser("~")
    }

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        # Bad/missing logging config: fall back to console logging rather
        # than aborting the run.
        logging.basicConfig(level=logging.INFO)
        logger.error("Unable to load the logging config, default to terminal!")

    config["paths"] = PATHS

    run_data = {
        "homepath": PATHS.get('HOME', os.path.expanduser("~")),
        "localpath": PATHS.get('LOCAL', os.getcwd()),
        "pid": os.getpid(),
        "date": datetime.now().strftime("%Y-%m-%d-%H-%M-%S"),
        "server": socket.gethostname(),
        "processes": []
    }
    logger.info("Timestamp: {}".format(run_data.get('date')))

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))

    config_type = config.get("configurations", {}).get("run", {}).get("kaptio")
    kaptio_config_file = get_configuration_path(config, config_type, config.get('paths', []))
    logger.info("\tLoading config: {}".format(kaptio_config_file))
    kaptio_config = load_kaptioconfig(kaptio_config_file)

    baseurl = kaptio_config['api']['baseurl']
    run_data['baseurl'] = baseurl
    kt = KaptioClient(baseurl, kaptio_config['api']['auth']['key'], kaptio_config['api']['auth']['secret'])

    # Dispatch table: process name (from config['process']) -> handler.
    # Every handler shares the (config, data, kt, savepath) signature.
    function_switch = {
        'save_data': save_data,
        'backup': backup_data,
        'export': export_pickle,
        'clear_data': clear_data,
        'partial': init_partial,
        'metadata': load_metadata,
        'tax_profiles': update_taxprofiles,
        'packages': process_packages,
        'marketing': get_marketingnames,
        'filter_packages': filter_packages,
        'augment_packages': augment_packages,
        'custom': promote_custom,
        'dates': process_dates,
        'prices': process_prices,
        'price_para': process_price_parallel,
        'augment_price': augment_pricelists,
        'errors': process_errors,
        'content': process_content,
        'items': process_items,
        'remove_pricedata': remove_pricedata,
        'allsell': process_pricedata,
        'bulkloader': process_bulkloader,
        'xml': process_xml
    }

    def log_data_summary(data):
        # Shared summary printout used before and after the pipeline run.
        logger.info("Data keys loaded...")
        for key, value in data.items():
            if value:
                logger.info("\t{} => {} : {}".format(key, type(value), len(value)))
            else:
                logger.info("\t{} : No Values".format(key))

    # BUG FIX: the original guard was the no-op expression statement
    # ``config['presets']`` and ran only after config['presets']['currency']
    # had already been written. Guard with a real assignment, up front.
    if 'presets' not in config:
        config['presets'] = {}

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency

        config_type = config.get("configurations", {}).get("run", {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Loading pickle file {}".format(pickle_file))

        config['presets']['pickle'] = pickle_file

        data = get_pickle_data(pickle_file)
        if len(data) > 0:
            log_data_summary(data)

        run_data['pickle'] = pickle_file

        for process in config.get('process', []):
            logger.info("Running: {}".format(process))
            run_data['processes'].append(process)
            handler = function_switch.get(process)
            if handler:
                data = handler(config, data, kt, savepath)
            else:
                # BUG FIX: was logging.warning (root logger), breaking the
                # configured logger hierarchy; use the module logger.
                logger.warning("no process defined for {}".format(process))

        run_data['end'] = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

        if not data.get('_runs'):
            data['_runs'] = {}
        # BUG FIX: run_data never had a 'hostname' key — the machine name is
        # stored under 'server', so .get('hostname') was always None.
        run_name = "{}-{}".format(run_data.get('server'), run_data.get('date'))
        data['_runs'][run_name] = run_data

        log_data_summary(data)
        save_pickle_data(data, pickle_file)

        try:
            save_json("kt_api_data.json", data)
        except Exception as ex:
            logger.info("Failed to save JSON file.\n\t{}".format(ex))
def main():
    """Entry point: optionally scan local/remote pickle stores and import
    cached remote content into the local per-currency pickle.

    Behaviour is driven by the booleans under config['flags']['switches']
    in ktapi.json (scan_local, scan_remote, import_remote, check_updates).
    """
    PATHS = {'LOCAL': os.getcwd(), 'HOME': os.path.expanduser("~")}

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        # Bad/missing logging config: fall back to console logging.
        logging.basicConfig(level=logging.INFO)

    config["paths"] = PATHS

    switches = config.get("flags", {}).get("switches", {})
    scan_local = switches.get("scan_local", False)
    scan_remote = switches.get("scan_remote", False)
    get_remote_content = switches.get("import_remote", False)
    check_updates = switches.get("check_updates", False)

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))

    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    logger.info("Timestamp: {}".format(timestamp))

    # BUG FIX: "Runnins" typo in the original log message.
    logger.info(
        "Running:\n\tscan local\t{}\n\tscan remote\t{}\n\tget remote\t{}\n\tcheck updates\t{}"
        .format(scan_local, scan_remote, get_remote_content, check_updates))

    if scan_local:
        logger.info("Local Pickles:")
        for f in scanfiles('.', r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    if scan_remote:
        logger.info("Remote Pickles:")
        for f in scanfiles(os.path.join(savepath, 'config'), r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    # BUG FIX: the original guard was the no-op expression statement
    # ``config['presets']`` and ran after the first presets write.
    if 'presets' not in config:
        config['presets'] = {}

    currencies = config.get("presets", {}).get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency

        config_type = config.get("configurations", {}).get("run", {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Loading pickle file {}".format(pickle_file))

        config['presets']['pickle'] = pickle_file
        data = get_pickle_data(pickle_file)

        # BUG FIX: data_src was only bound inside the import_remote branch,
        # so check_updates without import_remote raised NameError.
        data_src = None
        if get_remote_content:
            config_type = config.get("configurations", {}).get("run", {}).get("remote_pickle")
            if not config_type:
                # BUG FIX: "Unable to local" -> "Unable to locate".
                logger.error(
                    "\tUnable to locate remote pickle details "
                    "{configurations->run->remote_pickle}")
                break
            remote_pickle = get_configuration_path(config, config_type, config.get('paths', []))
            logger.info("\tloading remote {}".format(remote_pickle))
            data_src = get_pickle_data(remote_pickle)
            if 'content' in data_src:
                logger.info("Fetching remote cached content")
                kt_content = data_src.get('content')
                if kt_content:
                    data['content'] = kt_content
                    logger.info("Retrieved remote cached content")
                    save_pickle_data(data, pickle_file)

        display_data(pickle_file=pickle_file, data=data, name="Local")

        if check_updates:
            if data_src is None:
                logger.warning("check_updates requires import_remote; skipping price update")
            else:
                update_prices(data=data, data_src=data_src)
def save_data(config, data, kt, savepath):
    """Persist *data* to the pickle path configured under presets.

    The kt and savepath arguments are unused here but kept so the function
    matches the common (config, data, kt, savepath) handler signature.
    Returns the (possibly freshly-created) data dict.
    """
    data = data or {}
    target = config.get('presets', {}).get('pickle')
    save_pickle_data(data, target)
    return data