def set_config(cusid, tech, key, value, mod_name):
    """Update a single key in the data-source (DS) config JSON file.

    The key is matched case-insensitively against the existing top-level
    keys; unknown keys are silently ignored.  'PORT' is coerced to int —
    on a non-numeric value a warning is logged and nothing is written.

    :param cusid: customer id (forwarded to path/config helpers)
    :param tech: technology id (forwarded to path/config helpers)
    :param key: config key name, or None to do nothing
    :param value: new value (string as received from the caller)
    :param mod_name: module name passed through to File helpers for logging
    """
    path = wic.find_DS_config_file_path()
    json_data = File.load_JSON(path, mod_name)
    if key is not None and key.upper() in json_data:
        key1 = key.upper()
        if key1 == 'PORT':
            # PORT must be an integer.  Catch only the conversion errors:
            # the original broad `except Exception` would also have
            # swallowed dump_JSON failures and mislabelled them as a bad
            # value.  The file is only written when conversion succeeds.
            try:
                json_data[key1] = int(value)
            except (TypeError, ValueError) as e:
                logger(__name__).warning('bad value "{}": {} {}'.format(
                    value, type(e), e))
            else:
                File.dump_JSON(path, json_data, mod_name)
        else:
            json_data[key1] = value
            File.dump_JSON(path, json_data, mod_name)
def set_config(cusid, tech, key, value, mod_name):
    """Update a single key in the main config JSON file.

    The key is matched case-insensitively against the existing top-level
    keys; unknown keys (and key=None) are silently ignored.  Keys ending
    in '_PATH' are sanity-checked by resolving them with pathlib first;
    if resolution raises, a warning is logged and the old value is kept
    (the file is still rewritten).

    :param cusid: customer id (unused here; kept for a uniform signature)
    :param tech: technology id (unused here; kept for a uniform signature)
    :param key: config key name, or None to do nothing
    :param value: new value (a path string for *_PATH keys)
    :param mod_name: module name passed through to File helpers for logging
    """
    if key is None:
        return
    path = wic.find_config_file_path()
    json_data = File.load_JSON(path, mod_name)
    key1 = key.upper()
    if key1 not in json_data:
        return
    if key1.endswith('_PATH'):
        # Fixed typo in the log message ("varifying" -> "verifying").
        logger(__name__).info('verifying path "{}"...'.format(value))
        try:
            # resolve() raises on values that cannot form a path
            # (e.g. non-string types); used purely as a validity probe.
            pathlib.Path(value).resolve()
            json_data[key1] = value
        except Exception as e:
            logger(__name__).warning('bad path: {} {}'.format(
                type(e), e))
    else:
        json_data[key1] = value
    File.dump_JSON(path, json_data, mod_name)
def set_config(cusid, tech, key, value, mod_name):
    """Update a single key in the DB config JSON file.

    Accepts either a flat key ('KEY') or a dotted key ('SECTION.KEY').
    Flat keys only overwrite scalar entries (never a whole section dict).
    Special dotted cases: <PM|CM|DC>.TABLE is forced to '*', and
    *.PORT is coerced to int (bad input is logged and skipped).

    :param cusid: customer id (unused here; kept for a uniform signature)
    :param tech: technology id (unused here; kept for a uniform signature)
    :param key: 'KEY' or 'SECTION.KEY', or None to do nothing
    :param value: new value (string as received from the caller)
    :param mod_name: module name passed through to File helpers for logging
    """
    path = wic.find_DB_config_file_path()
    json_data = File.load_JSON(path, mod_name)
    if key is None:
        return
    # Raw string: '(\w+)' in a plain literal is an invalid escape
    # (DeprecationWarning, SyntaxError in future Pythons).
    m = re.match(r'(\w+)[.](\w+)', key)
    if m is None:
        key1 = key.upper()
        # A flat key must not clobber a whole section dict.
        if key1 in json_data and not isinstance(json_data[key1], dict):
            json_data[key1] = value
            File.dump_JSON(path, json_data, mod_name)
    else:
        key1, key2 = m.group(1).upper(), m.group(2).upper()
        if key1 in json_data and key2 in json_data[key1]:
            if key2 == 'TABLE' and key1 in ('PM', 'CM', 'DC'):
                logger(__name__).warning(
                    '{}.TABLE: fixed to any table ("*")'.format(key1))
                json_data[key1][key2] = '*'
            elif key2 == 'PORT':
                # PORT must be an integer.  The original let int() raise
                # on bad input; log-and-skip mirrors the DS variant of
                # set_config instead of crashing the caller.
                try:
                    json_data[key1][key2] = int(value)
                except (TypeError, ValueError) as e:
                    logger(__name__).warning(
                        'bad value "{}": {} {}'.format(value, type(e), e))
            else:
                json_data[key1][key2] = value
            File.dump_JSON(path, json_data, mod_name)
def create_config_file(cusid, tech, mod_name):
    """Write the default DS config file if it does not exist yet.

    An existing file is left untouched so local edits survive
    re-initialization.
    """
    target = wic.find_DS_config_file_path()
    if target.exists():
        return
    defaults = customer.get_default_DS_config(cusid, tech)
    File.dump_JSON(target, defaults, mod_name)
def dump_columns(cusid, tech, CAT, tblname=None, mod_name=__name__):
    """Fetch the column definitions for CAT and persist them as JSON.

    The target path comes from column.find_file_path; the payload from
    get_columns (optionally narrowed to a single table via tblname).
    """
    target = column.find_file_path(cusid, tech, CAT)
    payload = get_columns(cusid, tech, CAT, tblname, mod_name)
    File.dump_JSON(target, payload, mod_name)
def initialize_working_space(cusid, tech, date=None):
    """Create the folder/config skeleton for a customer+tech workspace.

    With date=None the persistent, date-independent layout is built:
    column folders, the three config files, a data-path cache folder and
    an (empty) COMMON_OBJECT.json.  With a datetime.date the per-day
    layout under <config>/<YYYYMMDD> is built, driven by the data-source
    file list for that day; if the list is empty the process exits.

    :param cusid: customer id (forwarded to path/config helpers)
    :param tech: technology id (forwarded to path/config helpers)
    :param date: None, or a datetime.date selecting the per-day layout
    """
    base = wic.find_config_path()
    if date is None:
        logger(__name__).info('initializing "{}".....'.format(base))
        for folder in ['columns', 'columns_bak']:
            Folder.create(base.joinpath(folder), __name__)
        create_config_file(cusid, tech, __name__)
        DataSource.create_config_file(cusid, tech, __name__)
        DB.create_config_file(cusid, tech, __name__)
        dpbase1 = _find_data_path(cusid, tech)
        if dpbase1 is not None:
            for folder in ['cache']:
                Folder.create(dpbase1.joinpath(folder), __name__)
            # Seed files that must exist but are never overwritten.
            for p, c in [('COMMON_OBJECT.json',
                          lambda f: File.dump_JSON(f, dict(), __name__))]:
                f = dpbase1.joinpath(p)
                if f.exists():
                    logger(__name__).info('found: "{}"'.format(str(f)))
                else:
                    c(f)
    # Exact type check is deliberate: isinstance() would also accept
    # datetime.datetime (a date subclass), changing the original behavior.
    elif type(date) is datetime.date:
        ymd = '{:%Y%m%d}'.format(date)
        base = base.joinpath(ymd)
        fcheck = base.joinpath('files.txt')
        # Fixed typo in the log message ("initailizing" -> "initializing").
        logger(__name__).info('initializing "{}"...'.format(fcheck))
        if not fcheck.exists():
            Folder.create(base, __name__)
            filelist = get_data_source_list(cusid, tech, date)
            if not filelist:
                # No data for this day: nothing to set up.
                sys.exit()
            with open(str(fcheck), 'w') as fo:
                # `with` closes the file; the original's explicit
                # fo.close() was redundant.
                for ln in filelist:
                    fo.write('{}\n'.format(ln))
            flz = DataSource.get_computed_config(
                cusid, tech, __name__)[RESTRICT.ZIP_FLT]
            # Collect (category, LRC) pairs by matching each file name
            # against the per-category compiled filename patterns.
            LRCs = set()
            for p in filelist:
                fn = pathlib.Path(p).name
                for r in flz:
                    m = flz[r].match(fn)
                    if m is not None:
                        try:
                            # PM patterns carry the LRC in group 3,
                            # all others in group 1.  `r == 'PM'` replaces
                            # the original identity test `r is 'PM'`,
                            # which relied on string interning.
                            LRCs.add((r, m.group(3) if r == 'PM'
                                      else m.group(1)))
                        except Exception as e:
                            # Pattern matched but the expected group is
                            # missing; log and keep scanning other files.
                            logger(__name__).debug('{}: {}'.format(m, e))
                        break
            for CAT, LRC in LRCs:
                Folder.create(base.joinpath(CAT).joinpath(LRC), __name__)
            Folder.create(base.joinpath('history'), __name__)
            dpbase = _find_data_path(cusid, tech)
            ymdbase = dpbase.joinpath(ymd)
            for fdrpath in ['tmp', 'columns/check', 'cache/OC',
                            RESTRICT.CO, RESTRICT.CM, RESTRICT.OC,
                            RESTRICT.DC, RESTRICT.FM, RESTRICT.PM]:
                Folder.create(ymdbase.joinpath(fdrpath), __name__)