def work(datarun_id, args=None):
    """
    A copy of the code in atm/scripts/worker.py.
    A call to this function will start and run a simple worker.
    """
    _logger = logging.getLogger('atm_server.worker:work')
    _logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler('logs/works.log')
    fmt = '%(asctime)-12s %(name)s - %(levelname)s %(message)s'
    fh.setFormatter(logging.Formatter(fmt))
    _logger.addHandler(fh)

    parser = argparse.ArgumentParser(description='Add more classifiers to database')
    add_arguments_sql(parser)
    add_arguments_aws_s3(parser)
    add_arguments_logging(parser)

    # add worker-specific arguments
    parser.add_argument('--cloud-mode', action='store_true', default=False,
                        help='Whether to run this worker in cloud mode')
    parser.add_argument('--time', help='Number of seconds to run worker',
                        type=int)
    parser.add_argument('--choose-randomly', action='store_true',
                        help='Choose dataruns to work on randomly (default = sequential order)')
    parser.add_argument('--no-save', dest='save_files', default=True,
                        action='store_const', const=False,
                        help="don't save models and metrics at all")

    # parse arguments and load configuration
    if args is None:
        args = []
    _args = parser.parse_args(args)

    # default logging config is different if initialized from the command line
    if _args.log_config is None:
        _args.log_config = os.path.join(atm.PROJECT_ROOT,
                                        'config/templates/log-script.yaml')

    sql_config, _, aws_config, log_config = load_config(**vars(_args))
    initialize_logging(log_config)

    # let's go
    _logger.warning('Worker started!')
    with datarun_config(datarun_id) as config:
        _logger.warning('Using configs from ' + config.config_path)
        db = Database(**vars(sql_config))
        try:
            atm_work(db=db,
                     datarun_ids=[datarun_id],
                     choose_randomly=_args.choose_randomly,
                     save_files=_args.save_files,
                     cloud_mode=_args.cloud_mode,
                     aws_config=aws_config,
                     log_config=log_config,
                     total_time=_args.time,
                     wait=False)
        except Exception as e:
            _logger.error(e)
            # reset the datarun so it can be picked up again later
            mark_running_datarun_pending(db, datarun_id)
            # bare raise preserves the original traceback
            raise
    _logger.warning('Worker exited.')
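
# A minimal sketch of one way `work` might be launched without blocking
# the caller (e.g. from an HTTP request handler). The multiprocessing
# wrapper and the name `start_worker_process` are assumptions added for
# illustration, not part of the original module; `work` itself only
# needs a datarun id and an optional argv-style argument list.
from multiprocessing import Process

def start_worker_process(datarun_id):
    # run the worker in a child process so the caller returns immediately
    p = Process(target=work, args=(datarun_id, []))
    p.daemon = True
    p.start()
    return p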
def start_server():
    # ATM flags
    parser = argparse.ArgumentParser()
    add_arguments_aws_s3(parser)
    add_arguments_sql(parser)
    add_arguments_datarun(parser)
    add_arguments_logging(parser)
    add_arguments_server(parser)
    _args = parser.parse_args()

    if _args.debug:
        os.environ['FLASK_ENV'] = 'development'

    app = create_app(vars(_args))
    app.run(debug=_args.debug, host=_args.host, port=int(_args.port))
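
# A minimal usage sketch, assuming this module is run directly as a
# script. `start_server` parses sys.argv itself, so nothing is forwarded;
# the --debug/--host/--port flags it reads are expected to be registered
# by add_arguments_server.
if __name__ == '__main__':
    start_server()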
import argparse
import os
import warnings

from atm import PROJECT_ROOT
from atm.config import (add_arguments_aws_s3, add_arguments_logging,
                        add_arguments_sql, load_config, initialize_logging)
from atm.database import Database
from atm.worker import Worker, work

warnings.filterwarnings('ignore')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Add more classifiers to database')
    add_arguments_sql(parser)
    add_arguments_aws_s3(parser)
    add_arguments_logging(parser)

    # add worker-specific arguments
    parser.add_argument('--cloud-mode', action='store_true', default=False,
                        help='Whether to run this worker in cloud mode')
    parser.add_argument('--dataruns', help='Only train on dataruns with these ids',
                        nargs='+')
    parser.add_argument('--time', help='Number of seconds to run worker',
                        type=int)
    parser.add_argument('--choose-randomly', action='store_true',
                        help='Choose dataruns to work on randomly (default = sequential order)')
    parser.add_argument('--no-save', dest='save_files', default=True,
                        action='store_const', const=False,
                        help="don't save models and metrics at all")

    # parse arguments and load configuration
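    # --- A sketch of how this script plausibly continues ---
    # The tail of the script is cut off above. The lines below are an
    # assumption, reconstructed by mirroring the parse/load/dispatch flow
    # of the `work` function earlier in this document (which describes
    # itself as a copy of this script); they are not the original code.
    _args = parser.parse_args()

    # default logging config is different if initialized from the command line
    if _args.log_config is None:
        _args.log_config = os.path.join(PROJECT_ROOT,
                                        'config/templates/log-script.yaml')

    sql_config, _, aws_config, log_config = load_config(**vars(_args))
    initialize_logging(log_config)

    db = Database(**vars(sql_config))
    work(db=db,
         datarun_ids=_args.dataruns,
         choose_randomly=_args.choose_randomly,
         save_files=_args.save_files,
         cloud_mode=_args.cloud_mode,
         aws_config=aws_config,
         log_config=log_config,
         total_time=_args.time,
         wait=False)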
def _add_common_arguments(parser):
    add_arguments_sql(parser)
    add_arguments_aws_s3(parser)
    add_arguments_logging(parser)
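
# Example use of the helper above: it collapses the repeated
# add_arguments_sql / add_arguments_aws_s3 / add_arguments_logging triple
# seen in `work` and in the worker script. `_make_worker_parser` is a
# hypothetical name used here for illustration only.
def _make_worker_parser():
    parser = argparse.ArgumentParser(description='Add more classifiers to database')
    _add_common_arguments(parser)
    return parser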