Example #1
import datetime
import logging

logger = logging.getLogger(__name__)

# Merge newly scanned files under `filepath` into the running `data` dict.
def scanfolder(data, filepath):
    if not data:
        data = {}

    filelist = data.get('files', [])

    scannedpaths = data.get('scanned', {})
    if filepath not in scannedpaths:
        scan = {
            "filepath": filepath,
            "scanned": datetime.datetime.now(),
        }

        found = 0
        for f in scanfiles(filepath):
            filelist.append(f)
            found += 1

        logger.info('Found {} / {} files'.format(found, len(filelist)))
        scan['found'] = found

        data['files'] = filelist
        scannedpaths[filepath] = scan

    data['scanned'] = scannedpaths
    return data
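
This and the later examples all lean on an external scanfiles helper that the listing never shows. A minimal sketch of a compatible generator, assuming it yields one dict per matching file with 'file', 'folder' and 'modified' keys (the key names, the optional filter regex and the timestamp format are inferred from the call sites below):

import os
import re
from datetime import datetime

def scanfiles(folder='.', filter=None):
    # compile the optional filename regex once
    pattern = re.compile(filter) if filter else None
    for dirpath, _, filenames in os.walk(folder):
        for name in filenames:
            if pattern and not pattern.search(name):
                continue
            fullpath = os.path.join(dirpath, name)
            modified = datetime.fromtimestamp(os.path.getmtime(fullpath))
            yield {
                'file': name,
                'folder': dirpath,
                'modified': modified.strftime("%Y-%m-%d %H:%M:%S"),
            }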
Example #2
import logging
import logging.config
import os

def main():
    logger = logging.getLogger(__name__)

    PATHS = {'LOCAL': os.getcwd(), 'HOME': os.path.expanduser("~")}

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        # fall back to a sane default if the config has no usable logger section
        logging.basicConfig(level=logging.INFO)
    config["paths"] = PATHS

    folderpath = get_folderpath(config, '_config', PATHS)
    logger.info("Scanning folder: {}".format(folderpath))

    for f in scanfiles(folder=folderpath, filter=r"\.pickle$"):
        filepath = os.path.join(f.get('folder', ''), f.get('file'))
        data = get_pickle_data(pickleName=filepath)
        for k, v in data.items():
            if v:
                logger.info("\t{} => {} : {}".format(k, type(v), len(v)))
            else:
                logger.info("\t{}".format(k))

        content = data.get('content')
        if content:
            filename, _ = os.path.splitext(f.get('file'))
            filename = '{}.json'.format(filename)
            save_json(file_path=filename, data=content)
            logger.info("Export: {} => {}".format('content', filename))
Example #3
import logging
import os

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

homepath = os.path.expanduser("~")
datapaths = ["OneDrive - Great Canadian Railtour Co", "Jupyter_NB"]
savepath = os.path.join(homepath, *datapaths)

filter_str = "[a-zA-Z_]*.json"

pickle_file = "kaptio_dumps.pickle"
data = get_pickle_data(pickle_file)

data['files'] = []
data['names'] = {}
for f in scanfiles(savepath):
    data['files'].append(f)
    
    # group files by name so duplicates across folders are visible
    data['names'].setdefault(f['file'], []).append(f)

logger.info("found {} files.".format(len(data['names'])))

save_pickle_data(data, pickle_file)

#savepath = r"c:\Users\dgloyncox\git\dreffed\Kaptio_API"
logger.info("scanning folder {}".format(savepath))
for dirpath, _, filenames in os.walk(savepath):
    logger.info("{}: {} files".format(dirpath, len(filenames)))
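
get_pickle_data and save_pickle_data are likewise external helpers. A plausible minimal pair, assuming one pickled dict per cache file and an empty dict when the cache does not exist yet:

import os
import pickle

def get_pickle_data(pickleName):
    # a missing cache file simply means "start fresh"
    if not os.path.exists(pickleName):
        return {}
    with open(pickleName, mode='rb') as fp:
        return pickle.load(fp)

def save_pickle_data(data, pickleName):
    with open(pickleName, mode='wb') as fp:
        pickle.dump(data, fp)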
Example #4
import json
import logging
import os

logger = logging.getLogger(__name__)

# `data`, `occupancy` and `localpath` are assumed to be defined earlier in the script
season_end = data['season']['end']
kt_packages = data['packages']
kt_pricelist = data['pricelist']

# build the reverse occupancy lookup (value -> key)
rev_occ = {value: key for key, value in occupancy.items()}

# get the file list
datapath = localpath  # os.path.join(savepath, 'data')
fn_pricelist = []
fn_reprocess = []
filecount = 0
errorcount = 0
logger.info("scanning... {}".format(datapath))
for f in scanfiles(datapath, r'price_([a-zA-Z0-9]+)_([\d]+)\.json'):
    filepath = os.path.join(f['folder'], f['file'])
    try:
        filecount += 1
        with open(filepath, mode='r') as fp:
            try:
                # fast path: the file is a single well-formed JSON document
                pl_data = json.load(fp)
                fn_pricelist.append(pl_data)
                continue
            except json.JSONDecodeError:
                # not a single document; rewind and try the repair path below
                fp.seek(0)

            try:
                # repair concatenated objects: "}{" -> "},{" inside a list
                pl_data = json.loads("[{}]".format(fp.read().replace("}{", "},{")))
                fn_pricelist.append(pl_data)
            except json.JSONDecodeError as ex:
                # still unparseable; queue the file for manual reprocessing
                errorcount += 1
                fn_reprocess.append(filepath)
                logger.error("Failed to parse {}: {}".format(filepath, ex))
    except OSError as ex:
        errorcount += 1
        logger.error("Unable to read {}: {}".format(filepath, ex))
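
The fallback above repairs files that contain several JSON objects written back to back: inserting a comma between each "}{" pair and wrapping the result in brackets turns the stream into a single parseable array. A small standalone demonstration:

import json

raw = '{"id": 1}{"id": 2}{"id": 3}'   # three objects, no separators
repaired = "[{}]".format(raw.replace("}{", "},{"))
print(json.loads(repaired))           # [{'id': 1}, {'id': 2}, {'id': 3}]

Note that this is a heuristic: a literal "}{" inside a string value would be corrupted, so it only suits machine-written files of flat objects like these price dumps.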
Example #5
import json
import logging
import os
from datetime import datetime
from time import time

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

update_lookups = False

homepath = os.path.expanduser("~")
datapaths = ["OneDrive - Great Canadian Railtour Co", "Jupyter_NB"]
savepath = os.path.join(homepath, *datapaths)
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")

pickle_files = scanfiles('.', r".*\.pickle")
logger.info("Pickles:")
for f in pickle_files:
    logger.info("\t{} => {} [{}]".format(f['file'], f['folder'],
                                         json.dumps(f, indent=2)))

pickle_file = "kaptio_allsell.pickle"

data = get_pickle_data(pickle_file)

for key in data:
    logger.info(key)

if update_lookups:
    tax_profiles = {
        "Zero Rated": "a8H4F0000003tsfUAA",
Example #6
import logging
import os
import shutil
from datetime import datetime

logger = logging.getLogger(__name__)

homepath = os.path.expanduser("~")
datapaths = ["OneDrive - Great Canadian Railtour Co", "Jupyter_NB"]
savepath = os.path.join(homepath, *datapaths)
localpaths = ["data"]
localpath = os.path.join(homepath, *localpaths)

if not os.path.exists(localpath):
    # create the folder...
    logger.info("Creating archive directory: {}".format(localpath))
    os.makedirs(localpath)

datestamp = datetime.now()
datapath = os.path.join(savepath, "data")
filecount = 0
for f in scanfiles(datapath, r".*\.json"):
    filecount += 1
    filedate = datetime.strptime(f['modified'], "%Y-%m-%d %H:%M:%S")
    filepath = os.path.join(f.get('folder'), f.get('file'))
    # drop anything older than eight hours; archive the rest by date
    if (datestamp - filedate).total_seconds() > (8 * 60 * 60):
        os.remove(filepath)
        continue

    destfolder = os.path.join(localpath, filedate.strftime("%Y-%m-%d"))
    if not os.path.exists(destfolder):
        os.makedirs(destfolder)
    destfile = os.path.join(destfolder, f.get('file'))
    # move the file into the dated archive folder
    try:
        shutil.move(filepath, destfile)
    except FileExistsError:
        logger.warning("Already archived, skipping: {}".format(destfile))
Example #7
import logging
import logging.config
import os
from datetime import datetime

def main():
    logger = logging.getLogger(__name__)
    PATHS = {'LOCAL': os.getcwd(), 'HOME': os.path.expanduser("~")}

    config = load_config('ktapi.json')
    try:
        logging.config.dictConfig(config.get('logger', {}))
    except Exception:
        logging.basicConfig(level=logging.INFO)
    config["paths"] = PATHS

    switches = config.get("flags", {}).get("switches", {})
    scan_local = switches.get("scan_local", False)
    scan_remote = switches.get("scan_remote", False)
    get_remote_content = switches.get("import_remote", False)
    check_updates = switches.get("check_updates", False)

    savepath = get_folderpath(config, '_remote', PATHS)
    logger.info('Savepath: {}'.format(savepath))
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    logger.info("Timestamp: {}".format(timestamp))

    logger.info(
        "Running:\n\tscan local\t{}\n\tscan remote\t{}\n\tget remote\t{}\n\tcheck updates\t{}"
        .format(scan_local, scan_remote, get_remote_content, check_updates))

    if scan_local:
        logger.info("Local Pickles:")
        for f in scanfiles('.', r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    if scan_remote:
        logger.info("Remote Pickles:")
        for f in scanfiles(os.path.join(savepath, 'config'), r".*\.pickle"):
            logger.info("\t{} => {}".format(f['file'], f['folder']))

    config.setdefault('presets', {})
    currencies = config['presets'].get("currencies", ["CAD"])
    for currency in currencies:
        config['presets']['currency'] = currency
        config_type = config.get("configurations", {}).get("run",
                                                           {}).get("pickle")
        pickle_file = get_configuration_path(config, config_type, PATHS)
        name, ext = os.path.splitext(pickle_file)
        pickle_file = "{}_{}{}".format(name, currency, ext)
        logger.info("Loading pickle file {}".format(pickle_file))

        config['presets']['pickle'] = pickle_file
        data = get_pickle_data(pickle_file)
        data_src = {}  # filled in below when import_remote is enabled

        if get_remote_content:
            config_type = config.get("configurations",
                                     {}).get("run", {}).get("remote_pickle")
            if not config_type:
                logger.error(
                    "\tUnable to locate remote pickle details {configurations->run->remote_pickle}"
                )
                break

            remote_pickle = get_configuration_path(config, config_type, PATHS)
            logger.info("\tloading remote {}".format(remote_pickle))

            data_src = get_pickle_data(remote_pickle)

            if 'content' in data_src:
                logger.info("Fetching remote cached content")
                kt_content = data_src.get('content')

                if kt_content:
                    data['content'] = kt_content
                    logger.info("Retrieved remote cached content")

                    save_pickle_data(data, pickle_file)
        display_data(pickle_file=pickle_file, data=data, name="Local")

        if check_updates and data_src:
            update_prices(data=data, data_src=data_src)
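
Deeply nested flag lookups like flags -> switches -> <name> recur throughout this example; a small helper can collapse them. A sketch (the get_nested name is an assumption, not part of the source):

def get_nested(cfg, *keys, default=None):
    # walk a nested dict, returning `default` as soon as a key is missing
    for key in keys:
        if not isinstance(cfg, dict) or key not in cfg:
            return default
        cfg = cfg[key]
    return cfg

# usage:
# scan_local = get_nested(config, "flags", "switches", "scan_local", default=False)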