class BaseAPI(BaseLogger):
    """Base HTTP API client.

    Loads environment-specific and common configuration, then wraps
    ``requests`` calls with request/response logging.
    """

    # NOTE(review): this class attribute is immediately shadowed by the
    # instance attribute assigned in __init__; kept for backward
    # compatibility with code that may read BaseAPI.env_name directly.
    env_name = os.environ['ENVIRONMENT']

    def __init__(self):
        self.env_name = os.environ['ENVIRONMENT']
        # Environment-specific settings (user credentials, etc.).
        self.env_config = ConfigParser(
            f'config/{self.env_name}_env.ini').config
        # Settings shared by all environments (base URL, etc.).
        self.common_config = ConfigParser('config/common.ini').config
        self.root_url = self.common_config.get('main_url', 'main_url')
        self.user_email = self.env_config.get('user_config', 'user_email')
        self.user_password = self.env_config.get('user_config',
                                                 'user_password')

    @staticmethod
    def get(url):
        """Send a GET request to ``url`` and return the raw response."""
        BaseLogger.get_info_log('Sending GET url: {}.'.format(url))
        response = requests.get(url)
        # Log the received response for consistency with post().
        BaseLogger.get_info_log('Received "{}".'.format(response))
        return response

    def post(self, url, body):
        """Send a POST with form ``body``; return the raw response.

        The JSON-decoded body is only logged, not returned.
        """
        BaseLogger.get_info_log('Sending POST url: {} body: {}.'.format(
            url, body))
        response = requests.post(url, data=body)
        parsed_response = self.parse_response_to_json(response)
        BaseLogger.get_info_log('Received "{}".'.format(response))
        BaseLogger.get_info_log('Response body "{}".'.format(parsed_response))
        return response

    @staticmethod
    def parse_response_to_json(response):
        """Return the JSON-decoded response body."""
        return response.json()

    @staticmethod
    def ger_response_status_code(response):
        """Return the HTTP status code.

        Misspelled name kept for backward compatibility; prefer the
        correctly spelled alias ``get_response_status_code``.
        """
        return response.status_code

    # Correctly spelled alias for the misspelled method above.
    get_response_status_code = ger_response_status_code
def __init__(self):
    """Read environment-specific and common configuration and cache the
    base URL plus the configured user credentials."""
    env = os.environ['ENVIRONMENT']
    self.env_name = env
    self.env_config = ConfigParser(f'config/{env}_env.ini').config
    self.common_config = ConfigParser('config/common.ini').config
    self.root_url = self.common_config.get('main_url', 'main_url')
    self.user_email = self.env_config.get('user_config', 'user_email')
    self.user_password = self.env_config.get('user_config', 'user_password')
def createProcesses():
    """Bind a listening TCP socket from parsed config and fork one worker
    process per configured CPU, then wait for them to exit.

    Fix: the listening socket is now closed on every exit path via
    ``finally`` (it was previously leaked).
    """
    from logging import basicConfig, info, INFO
    from config.config_parser import ConfigParser
    from multiprocessing import Process
    from os import waitpid
    import socket

    procs = []
    config = ConfigParser.parse()
    basicConfig(level=INFO)
    info('\nhost: {}\nport: {}\nthreads: {}\ncpu_count: {}'.format(
        config.host, config.port, config.threads, config.cpu_count))

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((config.host, int(config.port)))
    s.listen(100)
    try:
        # One worker per CPU; each inherits the listening socket.
        for _ in range(int(config.cpu_count)):
            procs.append(Process(target=startProcess, args=(config, s)))
        for proc in procs:
            proc.start()
        for proc in procs:
            waitpid(proc.pid, 0)
    except KeyboardInterrupt:
        for proc in procs:
            proc.terminate()
    finally:
        # Previously leaked: release the listening socket.
        s.close()
def do_read_cfg_file(self, e):
    """Load the baking configuration file's text into self.__config_text."""
    cfg_dir = os.path.expanduser(self.args.config_dir)
    # Create the configuration directory when missing so that the user
    # can easily drop a configuration file into it.
    if cfg_dir and not os.path.exists(cfg_dir):
        logger.debug("Creating path '{}'".format(cfg_dir))
        os.makedirs(cfg_dir)
    cfg_file = self.get_baking_cfg_file(cfg_dir)
    logger.info("Loading baking configuration file {}".format(cfg_file))
    self.__config_text = ConfigParser.load_file(cfg_file)
def runIndependentChecks(configFile, interactive):
    """Build the Config from the parsed YAML (prompting for credentials
    when no interactive client is supplied) and determine which Azure
    resource providers still need registration.

    Returns (providers_to_register, config, interactive_client).
    """
    configYaml = ConfigParser.parseConfig(configFile)
    if interactive is None:
        interactive = InteractiveClient(configYaml['authType'],
                                        AZURE_PUBLIC_CLOUD)
        interactive.getUserNamePasswordCredentials()

    config = Config(credentials=interactive.getCredentials(),
                    userClientSecret=str(uuid.uuid4()),
                    subscriptionId=configYaml['complianceSetup']['subscriptionId'],
                    tenantId=configYaml['tenantId'],
                    identifierUrl=IDENTIFIER_URL,
                    cloudType=AZURE_PUBLIC_CLOUD.name,
                    updateApp=False,
                    configJson=configYaml)
    # Checking if App exists
    config.setIsUpdateApp(interactive.getUpdateApp(config))

    credentialsProvider = CredentialsProviderFactory.getCredentialsProvider(
        config)
    providerUtil = ProviderUtil(credentialsProvider)

    # Collect providers not yet registered on the first subscription.
    toRegister = [
        provider for provider in PROVIDER_REGISTRATION_LIST_COMPLIANCE
        if not providerUtil.checkProviderExists(
            provider, config.getSubscriptionList()[0])
    ]
    if config.isActivityLogSetup():
        toRegister.extend(
            provider for provider in PROVIDER_REGISTRATION_LIST_ACTIVITY
            if not providerUtil.checkProviderExists(
                provider, config.getSubscriptionList()[0]))
    return toRegister, config, interactive
def main(args):
    """Daemon entry point: load configuration, then run the payment
    producer and consumers until the life cycle stops."""
    logger.info("TRD version {} is running in {} mode.".format(
        version.version,
        "daemon" if args.background_service else "interactive"))
    logger.info("Arguments Configuration = {}".format(
        json.dumps(args.__dict__, indent=1)))

    # 1- locate the configuration directory; create it when absent so the
    # user has an obvious place to drop configuration files.
    config_dir = os.path.expanduser(args.config_dir)
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # 2- load the optional master configuration file.
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.debug("Loading master configuration file {}".format(
            master_config_file_path))
        master_cfg = YamlConfParser(
            ConfigParser.load_file(master_config_file_path)).parse()
    else:
        logger.debug("master configuration file not present.")

    managers = master_cfg['managers'] if 'managers' in master_cfg else None
    contracts_by_alias = (master_cfg['contracts_by_alias']
                          if 'contracts_by_alias' in master_cfg else None)
    addresses_by_pkh = (master_cfg['addresses_by_pkh']
                        if 'addresses_by_pkh' in master_cfg else None)

    # 3- locate the client executable.
    client_path = get_client_path(
        [x.strip() for x in args.executable_dirs.split(',')],
        args.docker, args.network, args.verbose)
    logger.debug("Dune client path is {}".format(client_path))

    # 4- get network config.
    config_client_manager = SimpleClientManager(client_path)
    network_config_map = init_network_config(args.network,
                                             config_client_manager,
                                             args.node_addr)
    network_config = network_config_map[args.network]
    logger.debug("Network config {}".format(network_config))

    # 5- load and process the baking configuration file.
    config_file_path = get_baking_configuration_file(config_dir)
    logger.info(
        "Loading baking configuration file {}".format(config_file_path))
    wllt_clnt_mngr = WalletClientManager(client_path, contracts_by_alias,
                                         addresses_by_pkh, managers,
                                         verbose=args.verbose)
    provider_factory = ProviderFactory(args.reward_data_provider,
                                       verbose=args.verbose)
    parser = BakingYamlConfParser(ConfigParser.load_file(config_file_path),
                                  wllt_clnt_mngr, provider_factory,
                                  network_config, args.node_addr,
                                  verbose=args.verbose)
    parser.parse()
    parser.validate()
    parser.process()

    # Wrap the parsed dict in BakingConf for a bit of type safety.
    cfg = BakingConf(parser.get_conf_obj(), master_cfg)
    logger.info("Baking Configuration {}".format(cfg))

    baking_address = cfg.get_baking_address()
    payment_address = cfg.get_payment_address()
    logger.info(LINER)
    logger.info("BAKING ADDRESS is {}".format(baking_address))
    logger.info("PAYMENT ADDRESS is {}".format(payment_address))
    logger.info(LINER)

    # 6- dry-run handling: with no consumers nothing is actually paid out.
    dry_run = args.dry_run_no_consumers or args.dry_run
    if args.dry_run_no_consumers:
        global NB_CONSUMERS
        NB_CONSUMERS = 0

    # 7- reporting directories (redirected to ./reports for dry runs).
    reports_base = os.path.expanduser(args.reports_base)
    if dry_run:
        reports_base = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_base, baking_address)
    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)

    # 8- start the life cycle.
    life_cycle.start(not dry_run)

    # 9- service fee calculator.
    srvc_fee_calc = ServiceFeeCalculator(cfg.get_full_supporters_set(),
                                         cfg.get_specials_map(),
                                         cfg.get_service_fee())

    # Resume from the cycle after the latest report, or from 0.
    if args.initial_cycle is None:
        recent = get_latest_report_file(payments_root)
        args.initial_cycle = 0 if recent is None else int(recent) + 1
        logger.info("initial_cycle set to {}".format(args.initial_cycle))

    producer = PaymentProducer(name='producer',
                               initial_payment_cycle=args.initial_cycle,
                               network_config=network_config,
                               payments_dir=payments_root,
                               calculations_dir=calculations_root,
                               run_mode=RunMode(args.run_mode),
                               service_fee_calc=srvc_fee_calc,
                               release_override=args.release_override,
                               payment_offset=args.payment_offset,
                               baking_cfg=cfg,
                               life_cycle=life_cycle,
                               payments_queue=payments_queue,
                               dry_run=dry_run,
                               wllt_clnt_mngr=wllt_clnt_mngr,
                               node_url=args.node_addr,
                               provider_factory=provider_factory,
                               verbose=args.verbose)
    producer.start()

    publish_stats = not args.do_not_publish_stats
    for idx in range(NB_CONSUMERS):
        consumer = PaymentConsumer(
            name='consumer' + str(idx),
            payments_dir=payments_root,
            key_name=payment_address,
            client_path=client_path,
            payments_queue=payments_queue,
            node_addr=args.node_addr,
            wllt_clnt_mngr=wllt_clnt_mngr,
            args=args,
            verbose=args.verbose,
            dry_run=dry_run,
            delegator_pays_xfer_fee=cfg.get_delegator_pays_xfer_fee(),
            dest_map=cfg.get_dest_map(),
            network_config=network_config,
            publish_stats=publish_stats)
        time.sleep(1)
        consumer.start()

    logger.info("Application start completed")
    logger.info(LINER)

    # Block until the life cycle ends or the user interrupts.
    try:
        while life_cycle.is_running():
            time.sleep(10)
    except KeyboardInterrupt:
        logger.info("Interrupted.")
        life_cycle.stop()
def main(args):
    """Pay out a manual payments file (lines of ``pkh: amount``) through a
    PaymentConsumer.

    Fixes: blank lines in the payments file are now skipped, and a
    malformed line raises a descriptive error instead of an opaque
    ValueError from tuple unpacking.
    """
    logger.info("Arguments Configuration = {}".format(
        json.dumps(args.__dict__, indent=1)))

    # 1- find where configuration is
    config_dir = os.path.expanduser(args.config_dir)
    # create configuration directory if it is not present
    # so that user can easily put his configuration there
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # 2- Load master configuration file if it is present
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.info("Loading master configuration file {}".format(
            master_config_file_path))
        master_parser = YamlConfParser(
            ConfigParser.load_file(master_config_file_path))
        master_cfg = master_parser.parse()
    else:
        logger.info("master configuration file not present.")

    managers = None
    contracts_by_alias = None
    addresses_by_pkh = None
    if 'managers' in master_cfg:
        managers = master_cfg['managers']
    if 'contracts_by_alias' in master_cfg:
        contracts_by_alias = master_cfg['contracts_by_alias']
    if 'addresses_by_pkh' in master_cfg:
        addresses_by_pkh = master_cfg['addresses_by_pkh']

    # 3- load payments file
    payments_file = os.path.expanduser(args.payments_file)
    if not os.path.isfile(payments_file):
        raise Exception(
            "payments_file ({}) does not exist.".format(payments_file))
    with open(payments_file, 'r') as file:
        payment_lines = file.readlines()

    payments_dict = {}
    for line in payment_lines:
        line = line.strip()
        if not line:
            # Tolerate blank lines (e.g. a trailing newline).
            continue
        # partition() splits only on the first ':' so extra colons in the
        # amount part no longer crash tuple unpacking.
        pkh, sep, amt = line.partition(":")
        if not sep:
            raise Exception(
                "Invalid payment line '{}'; expected 'pkh: amount'".format(
                    line))
        pkh = pkh.strip()
        amt = float(amt.strip())
        payments_dict[pkh] = amt

    if not payments_dict:
        raise Exception("No payments to process")

    # 4- get client path
    network_config = network_config_map[args.network]
    client_path = get_client_path(
        [x.strip() for x in args.executable_dirs.split(',')], args.docker,
        network_config, args.verbose)
    logger.debug("Tezos client path is {}".format(client_path))

    # 6- is it a reports run
    dry_run = args.dry_run

    # 7- get reporting directories
    reports_dir = os.path.expanduser(args.reports_dir)
    # if in reports run mode, do not create consumers
    # create reports in reports directory
    if dry_run:
        reports_dir = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_dir, "manual")

    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)

    wllt_clnt_mngr = WalletClientManager(client_path, contracts_by_alias,
                                         addresses_by_pkh, managers)

    for i in range(NB_CONSUMERS):
        c = PaymentConsumer(name='manual_payment_consumer',
                            payments_dir=payments_root,
                            key_name=args.paymentaddress,
                            client_path=client_path,
                            payments_queue=payments_queue,
                            node_addr=args.node_addr,
                            wllt_clnt_mngr=wllt_clnt_mngr,
                            verbose=args.verbose,
                            dry_run=dry_run,
                            delegator_pays_xfer_fee=False)
        time.sleep(1)
        c.start()

    # Report file name: payments file base name + timestamp.
    base_name_no_ext = os.path.basename(payments_file)
    base_name_no_ext = os.path.splitext(base_name_no_ext)[0]
    now = datetime.now()
    now_str = now.strftime("%Y%m%d%H%M%S")
    file_name = base_name_no_ext + "_" + now_str

    payment_items = []
    for key, value in payments_dict.items():
        payment_items.append(
            PaymentRecord.ManualInstance(file_name, key, value))

    payments_queue.put(payment_items)
    # Sentinel record tells the consumer to exit after the batch.
    payments_queue.put([PaymentRecord.ExitInstance()])
def main(args):
    """One-shot entry point that retries previously failed payments."""
    logger.info("Arguments Configuration = {}".format(
        json.dumps(args.__dict__, indent=1)))

    # 1- find where configuration is; create the directory when missing
    # so that the user can easily put his configuration there.
    config_dir = os.path.expanduser(args.config_dir)
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # 2- Load master configuration file if it is present
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.info("Loading master configuration file {}".format(
            master_config_file_path))
        master_cfg = YamlConfParser(
            ConfigParser.load_file(master_config_file_path)).parse()
    else:
        logger.info("master configuration file not present.")

    managers = None
    contracts_by_alias = None
    addresses_by_pkh = None
    if 'managers' in master_cfg:
        managers = master_cfg['managers']
    if 'contracts_by_alias' in master_cfg:
        contracts_by_alias = master_cfg['contracts_by_alias']
    if 'addresses_by_pkh' in master_cfg:
        addresses_by_pkh = master_cfg['addresses_by_pkh']

    # 3- get client path
    client_path = get_client_path(
        [x.strip() for x in args.executable_dirs.split(',')],
        args.docker, args.network, args.verbose)
    logger.debug("Tezos client path is {}".format(client_path))

    # 4- get network config
    config_client_manager = SimpleClientManager(client_path, args.node_addr)
    network_config_map = init_network_config(args.network,
                                             config_client_manager,
                                             args.node_addr)
    network_config = network_config_map[args.network]

    # 5- load baking configuration file
    config_file_path = get_baking_configuration_file(config_dir)
    logger.info(
        "Loading baking configuration file {}".format(config_file_path))
    wllt_clnt_mngr = WalletClientManager(client_path, contracts_by_alias,
                                         addresses_by_pkh, managers,
                                         verbose=args.verbose)
    provider_factory = ProviderFactory(args.reward_data_provider,
                                       verbose=args.verbose)
    parser = BakingYamlConfParser(ConfigParser.load_file(config_file_path),
                                  wllt_clnt_mngr, provider_factory,
                                  network_config, args.node_addr,
                                  verbose=args.verbose,
                                  api_base_url=args.api_base_url)
    parser.parse()
    parser.validate()
    parser.process()

    # dictionary to BakingConf object, for a bit of type safety
    cfg = BakingConf(parser.get_conf_obj(), master_cfg)
    logger.info("Baking Configuration {}".format(cfg))

    baking_address = cfg.get_baking_address()
    payment_address = cfg.get_payment_address()
    logger.info(LINER)
    logger.info("BAKING ADDRESS is {}".format(baking_address))
    logger.info("PAYMENT ADDRESS is {}".format(payment_address))
    logger.info(LINER)

    # 6- is it a reports run
    dry_run = args.dry_run_no_consumers or args.dry_run
    if args.dry_run_no_consumers:
        global NB_CONSUMERS
        NB_CONSUMERS = 0

    # 7- get reporting directories; dry runs report locally.
    reports_dir = os.path.expanduser(args.reports_base)
    if dry_run:
        reports_dir = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_dir, baking_address)
    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)

    # 8- start the life cycle (no producer loop for a retry run).
    life_cycle.start(False)

    # 9- service fee calculator
    srvc_fee_calc = ServiceFeeCalculator(cfg.get_full_supporters_set(),
                                         cfg.get_specials_map(),
                                         cfg.get_service_fee())
    try:
        p = PaymentProducer(name='producer',
                            initial_payment_cycle=None,
                            network_config=network_config,
                            payments_dir=payments_root,
                            calculations_dir=calculations_root,
                            run_mode=RunMode.ONETIME,
                            service_fee_calc=srvc_fee_calc,
                            release_override=0,
                            payment_offset=0,
                            baking_cfg=cfg,
                            life_cycle=life_cycle,
                            payments_queue=payments_queue,
                            dry_run=dry_run,
                            wllt_clnt_mngr=wllt_clnt_mngr,
                            node_url=args.node_addr,
                            provider_factory=provider_factory,
                            verbose=args.verbose,
                            api_base_url=args.api_base_url)
        # Re-queue failed payments, then drain them with one consumer.
        p.retry_failed_payments(args.retry_injected)
        c = PaymentConsumer(
            name='consumer_retry_failed',
            payments_dir=payments_root,
            key_name=payment_address,
            client_path=client_path,
            payments_queue=payments_queue,
            node_addr=args.node_addr,
            wllt_clnt_mngr=wllt_clnt_mngr,
            verbose=args.verbose,
            dry_run=dry_run,
            delegator_pays_xfer_fee=cfg.get_delegator_pays_xfer_fee(),
            network_config=network_config)
        time.sleep(1)
        c.start()
        p.exit()
        c.join()
        logger.info("Application start completed")
        logger.info(LINER)
        sleep(5)
    except KeyboardInterrupt:
        logger.info("Interrupted.")
def main(args):
    """Interactive configuration wizard: drives an FSM over user input and
    writes the resulting baking configuration to a YAML file."""
    logger.info("Arguments Configuration = {}".format(
        json.dumps(args.__dict__, indent=1)))

    # 1- find where configuration is; create it when absent so that the
    # user can easily put his configuration there.
    config_dir = os.path.expanduser(args.config_dir)
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # 2- Load master configuration file if it is present
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.info("Loading master configuration file {}".format(
            master_config_file_path))
        master_cfg = YamlConfParser(
            ConfigParser.load_file(master_config_file_path)).parse()
    else:
        logger.info("master configuration file not present.")

    managers = None
    contracts_by_alias = None
    addresses_by_pkh = None
    if 'managers' in master_cfg:
        managers = master_cfg['managers']
    if 'contracts_by_alias' in master_cfg:
        contracts_by_alias = master_cfg['contracts_by_alias']
    if 'addresses_by_pkh' in master_cfg:
        addresses_by_pkh = master_cfg['addresses_by_pkh']

    # 3- get client path
    client_path = get_client_path(
        [x.strip() for x in args.executable_dirs.split(',')],
        args.docker, args.network)
    logger.debug("tezos-client path is {}".format(client_path))

    # 4- get network config
    config_client_manager = SimpleClientManager(client_path, args.node_addr)
    network_config_map = init_network_config(args.network,
                                             config_client_manager,
                                             args.node_addr)
    global network_config
    network_config = network_config_map[args.network]
    logger.debug("Network config {}".format(network_config))

    global wllt_clnt_mngr
    wllt_clnt_mngr = WalletClientManager(client_path, args.node_addr,
                                         contracts_by_alias,
                                         addresses_by_pkh, managers)

    # hello state: greet, then step the FSM on each line of user input.
    command = input("{} >".format(messages['hello'])).strip()
    start()
    while not fsm.is_finished():
        sleep(0.1)
        command = input("{} >\n".format(messages[fsm.current])).strip()
        callbacks[fsm.current](command)

    # NOTE(review): `parser` appears to be a module-level object populated
    # by the FSM callbacks above — confirm against the rest of the file.
    parser.validate()
    parser.process()
    cfg_dict = parser.get_conf_obj()

    # dictionary to BakingConf object, for a bit of type safety
    cfg = BakingConf(cfg_dict, master_cfg)

    # Persist the configuration; double-underscore keys are internal.
    config_file_path = os.path.join(os.path.abspath(config_dir),
                                    cfg.get_baking_address() + '.yaml')
    cfg_dict_plain = {k: v for k, v in cfg_dict.items()
                      if not k.startswith('__')}
    with open(config_file_path, 'w') as outfile:
        yaml.dump(cfg_dict_plain, outfile, default_flow_style=True, indent=4)
    print("Configuration file is created at '{}'".format(config_file_path))
import argparse

# Default to sa_credentials.json next to this script; the sa_cred_file
# environment variable overrides it.
service_account_credentials_file_location = os.path.dirname(
    os.path.abspath(inspect.getfile(
        inspect.currentframe()))) + "/sa_credentials.json"
sa_env_var = os.environ.get("sa_cred_file")
if sa_env_var:
    service_account_credentials_file_location = sa_env_var


def raiseException(ex):
    """Raise ``ex`` — a statement-form raise usable as a callback."""
    raise ex


if __name__ == "__main__":
    ap = argparse.ArgumentParser()
    ap.add_argument("--mode",
                    required=False,
                    default="interactive",
                    help="mode is either interactive | non-interactive")
    # Unknown arguments are tolerated and ignored.
    args, unknown = ap.parse_known_args()
    args = vars(args)
    mode = args["mode"]

    credentials_data, project_id, isCloudShell = \
        ConfigParser.getCredentialsFileData(
            service_account_credentials_file_location)
    config = Config(credentials_data, project_id, isCloudShell,
                    None, None, None, None, None)
    client = UserClientFactory.getClient(mode, config)
    client.initConfig()
    client.validate()
    client.run()
class ConfigParserTests(TestCase):
    """Tests for ConfigParser: file/text parsing and how top-level options
    are (or are not) copied down to individual stat entries."""

    def setUp(self):
        self.cp = ConfigParser()

    def test_parse_file(self):
        # create test config file in a throwaway directory
        directory = mkdtemp('-caboose-config-parser-tests')
        filename = path.join(directory, "config")
        with open(filename, "w") as f:
            f.write("""{ "stats" : [ { "statname": "statnamehere" } ] }""")
        conf = self.cp.parse_file(filename)
        eq_('statnamehere', conf['stats'][0]['statname'])
        rmtree(directory)

    def test_parse_config_object(self):
        json = """{ "output_directory": "/path/to/outputdir", "stats": [ { "description": "# of blahblah in blah", "statname": "java_ncss", "repodir": "/path/to/code/codedir", "dirs": ["CodeDirectory"], "glob": "*.java", "start_time_delta": 2592000, "datapoint_time_delta": 604800, "outfile": "shared_ncss.json" } ] }"""
        conf = self.cp.parse_text(json)
        # Every field of the single stat entry round-trips unchanged.
        eq_('/path/to/outputdir', conf['output_directory'])
        eq_('# of blahblah in blah', conf['stats'][0]['description'])
        eq_('java_ncss', conf['stats'][0]['statname'])
        eq_('/path/to/code/codedir', conf['stats'][0]['repodir'])
        eq_('CodeDirectory', conf['stats'][0]['dirs'][0])
        eq_('*.java', conf['stats'][0]['glob'])
        eq_(2592000, conf['stats'][0]['start_time_delta'])
        eq_(604800, conf['stats'][0]['datapoint_time_delta'])
        eq_('shared_ncss.json', conf['stats'][0]['outfile'])

    def test_parse_config_copies_select_options_to_stat_level(self):
        json = """{ "output_directory": "/path/to/outputdir", "repodir": "/path/to/code/codedir", "dirs": ["CodeDirectory"], "glob": "*.java", "start_time_delta": 2592000, "sample_time_interval": 604800, "exclude_dirs": ["ExcludeMe"], "exclude_path_globs": ["*/test/*"], "datatype": "count", "stats": [ { "description": "# of blahblah in blah", "statname": "java_ncss", "outfile": "shared_ncss.json" } ] }"""
        conf = self.cp.parse_text(json)
        # Top-level options are inherited by the stat entry.
        eq_('/path/to/code/codedir', conf['stats'][0]['repodir'])
        eq_('CodeDirectory', conf['stats'][0]['dirs'][0])
        eq_('*.java', conf['stats'][0]['glob'])
        eq_(2592000, conf['stats'][0]['start_time_delta'])
        eq_(604800, conf['stats'][0]['sample_time_interval'])
        eq_("ExcludeMe", conf['stats'][0]['exclude_dirs'][0])
        eq_("*/test/*", conf['stats'][0]['exclude_path_globs'][0])
        eq_("count", conf['stats'][0]['datatype'])

    def test_parse_config_does_not_copy_over_stat_option(self):
        json = """{ "output_directory": "/path/to/outputdir", "repodir": "/path/to/code/codedir", "glob": "*.java", "stats": [ { "description": "# of blahblah in blah", "glob": "*.py", "statname": "java_ncss", "outfile": "shared_ncss.json" } ] }"""
        conf = self.cp.parse_text(json)
        # A stat-level value wins over the inherited top-level one.
        eq_('*.py', conf['stats'][0]['glob'])
def setUp(self):
    """Build a fresh parser instance before every test case."""
    self.cp = ConfigParser()
def main(args):
    """Entry point: parse and validate configuration, then run the payment
    producer/consumer pipeline with plugins until interrupted."""
    logger.info("TRD version {} is running in {} mode.".format(
        VERSION, "daemon" if args.background_service else "interactive"))
    logger.info("Arguments Configuration = {}".format(
        json.dumps(args.__dict__, indent=1)))

    publish_stats = not args.do_not_publish_stats
    logger.info(
        "Anonymous statistics {} be collected. See docs/statistics.rst for more information."
        .format("will" if publish_stats else "will not"))

    # 1- find where configuration is; create the directory if missing so
    # the user can easily put his configuration there.
    config_dir = os.path.expanduser(args.config_dir)
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # 4- get network config
    client_manager = ClientManager(node_endpoint=args.node_endpoint,
                                   signer_endpoint=args.signer_endpoint)
    network_config_map = init_network_config(args.network, client_manager)
    network_config = network_config_map[args.network]
    logger.debug("Network config {}".format(network_config))

    # Setup provider to fetch RPCs
    provider_factory = ProviderFactory(args.reward_data_provider)

    # 5- load and verify baking configuration file
    config_file_path = None
    try:
        config_file_path = get_baking_configuration_file(config_dir)
        logger.info(
            "Loading baking configuration file {}".format(config_file_path))
        parser = BakingYamlConfParser(
            yaml_text=ConfigParser.load_file(config_file_path),
            clnt_mngr=client_manager,
            provider_factory=provider_factory,
            network_config=network_config,
            node_url=args.node_endpoint,
            api_base_url=args.api_base_url)
        parser.parse()
        parser.validate()
        parser.process()
        # dictionary to BakingConf object, for a bit of type safety
        cfg = BakingConf(parser.get_conf_obj())
    except ConfigurationException as e:
        logger.info(
            "Unable to parse '{}' config file.".format(config_file_path))
        logger.info(e)
        sys.exit(1)

    logger.info("Baking Configuration {}".format(cfg))
    baking_address = cfg.get_baking_address()
    payment_address = cfg.get_payment_address()
    logger.info(LINER)
    logger.info("BAKING ADDRESS is {}".format(baking_address))
    logger.info("PAYMENT ADDRESS is {}".format(payment_address))
    logger.info(LINER)

    # 6- is it a reports run
    dry_run = args.dry_run_no_consumers or args.dry_run
    if args.dry_run_no_consumers:
        global NB_CONSUMERS
        NB_CONSUMERS = 0

    # 7- get reporting directories (local ./reports for dry runs)
    reports_base = os.path.expanduser(args.reports_base)
    if dry_run:
        reports_base = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_base, baking_address)
    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)

    # 8- start the life cycle
    life_cycle.start(not dry_run)

    # 9- service fee calculator
    srvc_fee_calc = ServiceFeeCalculator(cfg.get_full_supporters_set(),
                                         cfg.get_specials_map(),
                                         cfg.get_service_fee())

    # Resume from the cycle after the latest report, or from 0.
    if args.initial_cycle is None:
        recent = get_latest_report_file(payments_root)
        args.initial_cycle = 0 if recent is None else int(recent) + 1
        logger.info("initial_cycle set to {}".format(args.initial_cycle))

    # 10- load plugins
    plugins_manager = plugins.PluginManager(cfg.get_plugins_conf(), dry_run)

    # 11- Start producer and consumer
    producer = PaymentProducer(name='producer',
                               initial_payment_cycle=args.initial_cycle,
                               network_config=network_config,
                               payments_dir=payments_root,
                               calculations_dir=calculations_root,
                               run_mode=RunMode(args.run_mode),
                               service_fee_calc=srvc_fee_calc,
                               release_override=args.release_override,
                               payment_offset=args.payment_offset,
                               baking_cfg=cfg,
                               life_cycle=life_cycle,
                               payments_queue=payments_queue,
                               dry_run=dry_run,
                               client_manager=client_manager,
                               node_url=args.node_endpoint,
                               provider_factory=provider_factory,
                               node_url_public=args.node_addr_public,
                               api_base_url=args.api_base_url,
                               retry_injected=args.retry_injected)
    producer.start()

    for idx in range(NB_CONSUMERS):
        consumer = PaymentConsumer(
            name='consumer' + str(idx),
            payments_dir=payments_root,
            key_name=payment_address,
            payments_queue=payments_queue,
            node_addr=args.node_endpoint,
            client_manager=client_manager,
            plugins_manager=plugins_manager,
            rewards_type=cfg.get_rewards_type(),
            args=args,
            dry_run=dry_run,
            reactivate_zeroed=cfg.get_reactivate_zeroed(),
            delegator_pays_ra_fee=cfg.get_delegator_pays_ra_fee(),
            delegator_pays_xfer_fee=cfg.get_delegator_pays_xfer_fee(),
            dest_map=cfg.get_dest_map(),
            network_config=network_config,
            publish_stats=publish_stats)
        sleep(1)
        consumer.start()

    logger.info("Application start completed")
    logger.info(LINER)

    # Run forever
    try:
        while life_cycle.is_running():
            sleep(10)
    except KeyboardInterrupt:
        logger.info("Interrupted.")
        life_cycle.stop()
def __init__(self, driver):
    """Store the driver and resolve the application root URL from the
    common configuration file."""
    configuration = ConfigParser('/common.ini').config
    self.common_config = configuration
    self.root_url = configuration.get('main_url', 'main_url')
    self.driver = driver
class BasePage:
    """Common Selenium page object: navigation, clicking, typing and
    visibility waits against the configured root URL."""

    def __init__(self, driver):
        self.common_config = ConfigParser('/common.ini').config
        self.root_url = self.common_config.get('main_url', 'main_url')
        self.driver = driver

    def get_driver_version(self):
        """Return the driver version for the browser named in $BROWSER."""
        if os.environ['BROWSER'] == 'chrome':
            # Fix: dots are escaped — the old pattern used bare '.' which
            # matches ANY character, not a literal dot.
            return re.findall(
                r'(\w+\.\w+\.\w+\.\w+) ',
                self.driver.capabilities['chrome']['chromedriverVersion'])[0]
        elif os.environ['BROWSER'] == 'firefox':
            return self.driver.capabilities['moz:geckodriverVersion']

    def open_page(self, url=""):
        """Open root_url + url, retrying once via refresh on timeout."""
        try:
            self.driver.get('{}{}'.format(self.root_url, url))
        except TimeoutException:
            self.driver.refresh()

    def click(self, *locator):
        """Wait for the element to be visible, then click it."""
        self.wait_until_element_is_visible(locator)
        self.driver.find_element(*locator).click()

    def move_to_element(self, *locator):
        """Hover the mouse over the element."""
        self.wait_until_element_is_visible(locator)
        element = self.driver.find_element(*locator)
        actions = ActionChains(self.driver)
        actions.move_to_element(element)
        actions.perform()

    def is_displayed(self, *locator):
        return self.driver.find_element(*locator).is_displayed()

    def wait_until_element_is_visible(self, locator, timeout=TIMEOUT_SEC):
        """Wait for visibility; raise AssertionError on timeout."""
        try:
            WebDriverWait(self.driver, timeout).until(
                EC.visibility_of_element_located(locator))
        except TimeoutException:
            raise AssertionError(
                f'Element {locator} missed. It takes more than {timeout} sec to load an element'
            )

    def is_editable(self, *locator, **kwargs):
        """Type a generated value into the field and report whether the
        field reflects it (pass date_format=True for date fields)."""
        random_str = DataGenerators.generate_random_alphanumeric_str()
        if kwargs.get('date_format'):
            random_str = DEFAULT_VALUES.get('Date of Construction')
        self.type(random_str, *locator)
        return self.get_field_input_value(*locator) == random_str

    def type(self, text, *locator, **kwargs):
        """Type text into the element; without_clear=True appends."""
        self.wait_until_element_is_visible(locator)
        element = self.driver.find_element(*locator)
        if not kwargs.get('without_clear'):
            element.clear()
        element.send_keys(text)

    def open_dropdown_and_click_on_value(self, dropdown, dropdown_value):
        self.click(*dropdown)
        self.click(*dropdown_value)

    def get_field_input_value(self, *locator):
        return self.driver.find_element(*locator).get_attribute('value')

    def get_value_from_grid_or_dropdown(self, *locator):
        return self.driver.find_element(*locator).text