def main(config_path):
    # 1: load config
    config = load_config(config_path)

    # 2: Build alias store
    alias_store = AliasStore.from_alias_list(config['aliases'])

    # 3: ensure local dirs exist and peers are known, build file indexes
    remotes = []
    for remote in config['remotes']:
        # remote_name = f"remote `{remote['name']}'" if \
        #     remote['name'] is not None else 'unnamed remote'
        remote_name = remote['name']
        if os.path.isdir(remote['local_path']):
            print(f"✓ Local directory exists for {remote_name} at "
                  f"{remote['local_path']}")
            for peer in remote['peers']:
                if not alias_store.is_known(peer):
                    print(f"Error: Unknown peer `{peer}' in config for "
                          f"{remote_name}")
                    graceful_exit(1)
            print('Building index...', end='', flush=True)
            remotes.append(
                Remote(remote['local_path'], remote['peers'],
                       name=remote['name']))
            print(' Done')
        else:
            print(f"✗ Local directory does not exist for {remote_name} at "
                  f"{remote['local_path']}")
            graceful_exit(1)

    # 4: check for updates
    for remote in remotes:
        print(f'Checking for updates on {remote.name}...')
        # remote.file_index.print_files(lpad=' ')
        remote.update(alias_store, lpad=' ')

    # 5: open thread to listen for update requests
    file_indexes = {}
    for remote in remotes:
        file_indexes[remote.name] = remote.file_index
    print(f"Starting FileServer on port {config['port']}.")
    FileServer(file_indexes, config['port'])

    # 6: start update loop
    while True:
        time.sleep(60 * config['update_interval'])
        for remote in remotes:
            remote.file_index.update()
            remote.update(alias_store, lpad=' ')
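# graceful_exit() is referenced throughout but not defined in this excerpt; the
# sketch below is an assumption covering only the minimal behaviour the call
# sites rely on. sys.exit() already handles both call styles seen in this code:
# an int becomes the exit status, while a string is printed to stderr before
# exiting with status 1.
import sys

def graceful_exit(status):
    # Assumption: no extra cleanup beyond terminating the process.
    sys.exit(status)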
def parse_args():
    default_output = formatted_filepath('output', datestamp=True)
    default_logger = formatted_filepath(suffix='log', sep='.')
    parser = optparse.OptionParser()
    parser.add_option("", "--domain", default=None,
                      help="Site name. Supported: hdfcsec")
    parser.add_option("", "--txn-type", default=None,
                      help="Stock/MF")
    parser.add_option("", "--txn-files", default=None,
                      help="Transaction csv/xls files. Multiple values supported as comma-separated values")
    parser.add_option("-o", "--outdir", default=default_output,
                      help="Output dir name")
    parser.add_option("-l", "--logfile", default=default_logger,
                      help="Logfile name")
    (options, args) = parser.parse_args()
    if not (options.domain and options.txn_type and options.txn_files):
        parser.print_help()
        graceful_exit("Mandatory arguments missing! Please try again.")
    return (options, args)
def main(domain, txn_type, txn_files, outdir):
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    if domain not in domain_settings:
        graceful_exit("--domain `%s` is not yet supported!" % domain)
    txn_type = txn_type.lower()
    if txn_type not in ['stock', 'mf']:
        graceful_exit("--txn-type must be `stock` or `mf`. `%s` is not yet supported!" % txn_type)
    num_header_rows = domain_settings[domain]['num_header_rows']
    txn_df = pandas.DataFrame()
    for txn_file in txn_files.split(','):
        print("Parsing `%s`" % txn_file)
        if 'xls' in txn_file:
            tmp_txn_df = handle_xls(txn_file, outdir, num_header_rows)
        elif 'csv' in txn_file:
            tmp_txn_df = handle_csv(txn_file, outdir, num_header_rows)
        else:
            graceful_exit("Only xls, xlsx and csv formats are supported")
        print("Loaded %d transactions from `%s`" % (len(tmp_txn_df), txn_file))
        try:
            txn_df = txn_df.append(tmp_txn_df)
        except Exception as ee:
            logging.error(str(ee))
            graceful_exit("ERROR ALERT: Please ensure all transaction reports are of the same format")
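# The entry point for this script is not included in the excerpt; below is a
# minimal sketch, assuming parse_args() and main() above are wired together
# directly (the unused positional args are ignored here).
if __name__ == '__main__':
    (options, args) = parse_args()
    main(options.domain, options.txn_type, options.txn_files, options.outdir)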
def load_config(config_path):
    try:
        with open(config_path) as f:
            config = json.load(f)
    except FileNotFoundError as e:
        # TODO: use e?
        print(f'Error: Could not load config file `{config_path}\'.')
        graceful_exit(1)

    defaults = {'update_interval': 15, 'port': 6688}

    # set default update interval if not specified
    if 'update_interval' not in config:
        print(
            "Warning: Update interval not specified in config, using default value "
            f"{defaults['update_interval']}.")
        config['update_interval'] = defaults['update_interval']
    else:
        try:
            config['update_interval'] = float(config['update_interval'])
        except ValueError:
            print(
                "Warning: Invalid update interval in config, using default value "
                f"{defaults['update_interval']}.")
            config['update_interval'] = defaults['update_interval']

    # set default port if not specified
    if 'port' not in config:
        print("Warning: Port not specified in config, using default value "
              f"{defaults['port']}.")
        config['port'] = defaults['port']

    # ensure config format valid
    # ensure all remotes are named and there are no duplicates
    remote_names = set()
    for remote in config['remotes']:
        if remote['name'] is None or remote['name'] == '':
            print('Error: Unnamed remote found in config.')
            graceful_exit(1)
        elif remote['name'] in remote_names:
            print(f"Error: Duplicate remote name `{remote['name']}' found.")
            graceful_exit(1)
        remote_names.add(remote['name'])

    return config
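# A minimal sketch of the config.json shape that load_config() and main() read,
# inferred from the keys accessed in this excerpt; the concrete values (paths,
# peer names) are hypothetical, and the per-entry structure of 'aliases' is not
# shown because AliasStore.from_alias_list() is not part of this file.
EXAMPLE_CONFIG = {
    "aliases": [],                       # passed to AliasStore.from_alias_list()
    "update_interval": 15,               # optional; minutes between update passes
    "port": 6688,                        # optional; FileServer listen port
    "remotes": [
        {
            "name": "documents",         # must be non-empty and unique
            "local_path": "/srv/sync/documents",
            "peers": ["alice", "bob"]    # each peer must be known to the alias store
        }
    ]
}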
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', type=str, help='load custom config file')
    args = parser.parse_args()
    config_path = args.config if args.config else 'config.json'
    try:
        main(config_path)
    except KeyboardInterrupt:
        graceful_exit(0)