def test_tee_output_with_SystemExit():
    """SystemExit must propagate and leave stdout, stderr, and the tee file empty."""
    captured = StringIO()
    with assert_raises(SystemExit), stdfake() as fake, tee_output(captured):
        raise SystemExit(1)
    eq(fake.stdout.getvalue(), "")
    eq(fake.stderr.getvalue(), "")
    eq(captured.getvalue(), "")
def handle(self, name, dbname, chunk_size, yes, print_rows, **options):
    """Run the named SQL template against one database or all partitioned databases.

    With a single target (explicit ``dbname`` or only one alias) the query runs
    inline; otherwise the user is asked to confirm and the query is fanned out
    with one greenlet per database.
    """
    template = TEMPLATES[name]
    sql = template.format(chunk_size=chunk_size)
    # Templates may provide their own runner; default to a single execution.
    run = getattr(template, "run", run_once)
    dbnames = get_db_aliases_for_partitioned_query()

    # Optionally mirror all output into a timestamped log file.
    logfile = None
    if options.get('log_output'):
        logfile = "{}-{}.log".format(name, datetime.now().isoformat())
        print("writing output to file: {}".format(logfile))

    with tee_output(logfile):
        if dbname or len(dbnames) == 1:
            run(sql, dbname or dbnames[0], print_rows)
        elif not (yes or confirm(MULTI_DB % len(dbnames))):
            sys.exit('abort')
        else:
            # Fan out one greenlet per database, wait for all, then surface
            # the first failure (if any) via its traceback.
            greenlets = [gevent.spawn(run, sql, db, print_rows) for db in dbnames]
            gevent.joinall(greenlets)
            try:
                for job in greenlets:
                    job.get()
            except Exception:
                traceback.print_exc()
def test_tee_output_with_SystemExit():
    """A SystemExit raised inside the tee context should leave every stream empty."""
    sink = StringIO()
    with assert_raises(SystemExit), stdfake() as fake, tee_output(sink):
        raise SystemExit(1)
    for stream in (fake.stdout, fake.stderr, sink):
        eq(stream.getvalue(), "")
def handle(self, slug=None, log_dir=None, reset=False, chunk_size=100, **options):
    """Run the blob migration selected by ``slug``.

    When ``log_dir`` is given, per-item output goes to a timestamped log file
    and everything printed during the run is also teed into a summary file.
    Exits with the number of skipped items (non-zero) if any were skipped.
    Raises CommandError when ``slug`` names no known migration.
    """
    try:
        migrator = MIGRATIONS[slug]
    except KeyError:
        raise CommandError(USAGE)

    def do_migration():
        # NOTE: ``log_file`` is resolved at call time (closure late binding);
        # both call sites below assign it before calling this function.
        total, skips = migrator.migrate(
            log_file,
            reset=reset,
            chunk_size=chunk_size,
        )
        if skips:
            # non-zero exit signals that some items were skipped
            sys.exit(skips)

    if log_dir is None:
        log_file = None
        do_migration()
    else:
        now = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
        summary_file = os.path.join(log_dir, "{}-blob-migration-{}-summary.txt".format(slug, now))
        log_file = os.path.join(log_dir, "{}-blob-migration-{}.txt".format(slug, now))
        # Refuse to clobber logs left over from a previous run.
        assert not os.path.exists(summary_file), summary_file
        assert not os.path.exists(log_file), log_file
        # Line-buffered summary file; tee_output mirrors stdout/stderr into it.
        with open(summary_file, "w", 1, encoding='utf-8') as fh, tee_output(fh):
            do_migration()
def test_tee_output_with_SystemExit():
    """With a fake sys module, SystemExit should write nothing anywhere."""
    sink = StringIO()
    sysfake = fakesys()
    with assert_raises(SystemExit), tee_output(sink, sys=sysfake):
        raise SystemExit(1)
    eq(sysfake.stdout.getvalue(), "")
    eq(sysfake.stderr.getvalue(), "")
    eq(sink.getvalue(), "")
def test_tee_output_with_KeyboardInterrupt():
    """Ctrl-C propagates; its traceback goes to the tee file only, not std streams."""
    sink = StringIO()
    with assert_raises(KeyboardInterrupt), stdfake() as fake, tee_output(sink):
        raise KeyboardInterrupt("errrt")
    eq(fake.stdout.getvalue(), "")
    eq(fake.stderr.getvalue(), "")
    expected = (
        "Traceback (most recent call last):\n"
        " ...\n"
        "KeyboardInterrupt: errrt\n"
    )
    eq(sanitize_tb(sink.getvalue()), expected)
def test_tee_output_with_KeyboardInterrupt():
    """KeyboardInterrupt: fake stdout/stderr stay empty, tee file gets the traceback."""
    teefile = StringIO()
    with assert_raises(KeyboardInterrupt), stdfake() as fake, tee_output(teefile):
        raise KeyboardInterrupt("errrt")
    eq(fake.stdout.getvalue(), "")
    eq(fake.stderr.getvalue(), "")
    eq(
        sanitize_tb(teefile.getvalue()),
        "Traceback (most recent call last):\n"
        " ...\n"
        "KeyboardInterrupt: errrt\n",
    )
def _migrate():
    """Run the migration with output teed to the summary file.

    Exits non-zero on operator interrupt or when any items were skipped.
    """
    with tee_output(summary_file):
        try:
            total, skips = migrator.migrate(log_file, **options)
        except KeyboardInterrupt:
            print("stopped by operator")
            rng = options.get('date_range')
            if rng:
                print("while processing date range {}".format(rng))
            sys.exit(1)
        if skips:
            # non-zero exit communicates the skip count to the caller
            sys.exit(skips)
def handle(self, slug, log_dir=None, **options):
    """Run the blob migration named ``slug``, normalizing worker/date options.

    Unknown options are stripped; ``num_workers`` is ignored (with a warning)
    for migrations without a worker pool; ``date_range`` is parsed into a
    (start, end) tuple of dates. Output is teed into a summary file when
    ``log_dir`` is given. Exits non-zero on skips or operator interrupt.
    Raises CommandError on an unknown slug or a malformed date range.
    """
    try:
        migrator = MIGRATIONS[slug]
    except KeyError:
        raise CommandError(USAGE)
    # drop options not added by this command
    for name in list(options):
        if name not in self.option_names:
            options.pop(name)
    if not migrator.has_worker_pool:
        # pool-less migrations cannot use num_workers; warn if it was customized
        num_workers = options.pop("num_workers")
        if num_workers != DEFAULT_WORKER_POOL_SIZE:
            print("--num-workers={} ignored because this migration "
                  "does not use a worker pool".format(num_workers))
    elif options["num_workers"] > DEFAULT_BOTOCORE_MAX_POOL_CONNECTIONS:
        # raise the botocore connection pool to match the worker count
        set_max_connections(options["num_workers"])
    if "date_range" in options:
        rng = options["date_range"]
        if rng is None:
            options.pop("date_range")
        else:
            if "-" not in rng:
                # single date means "everything up to and including this date"
                rng = (None, get_date(rng))
            else:
                rng = rng.split("-")
                if len(rng) != 2:
                    raise CommandError("bad date range: {}".format(rng))
                rng = tuple(get_date(v) for v in rng)
            # date_range is a tuple containing two date values
            # a value of None means that side of the range is unbounded
            options["date_range"] = rng
    if log_dir is None:
        summary_file = log_file = None
    else:
        now = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
        summary_file = os.path.join(log_dir, "{}-blob-migration-{}-summary.txt".format(slug, now))
        log_file = os.path.join(log_dir, "{}-blob-migration-{}.txt".format(slug, now))
        # refuse to clobber logs from a previous run
        assert not os.path.exists(summary_file), summary_file
        assert not os.path.exists(log_file), log_file
    with tee_output(summary_file):
        try:
            total, skips = migrator.migrate(log_file, **options)
            if skips:
                sys.exit(skips)
        except KeyboardInterrupt:
            print("stopped by operator")
            sys.exit(1)
def handle(self, slug, log_dir=None, **options):
    """Run the blob migration named ``slug`` after option normalization.

    Strips options not declared by this command, ignores ``num_workers`` for
    migrations without a worker pool (warning when customized), widens the
    botocore connection pool when needed, and parses ``date_range`` into a
    (start, end) tuple. When ``log_dir`` is set, printed output is teed into
    a timestamped summary file. Exits non-zero on skips or interrupt.
    """
    try:
        migrator = MIGRATIONS[slug]
    except KeyError:
        raise CommandError(USAGE)
    # drop options not added by this command
    for name in list(options):
        if name not in self.option_names:
            options.pop(name)
    if not migrator.has_worker_pool:
        # worker-pool option is meaningless here; warn only if user changed it
        num_workers = options.pop("num_workers")
        if num_workers != DEFAULT_WORKER_POOL_SIZE:
            print("--num-workers={} ignored because this migration "
                  "does not use a worker pool".format(num_workers))
    elif options["num_workers"] > DEFAULT_BOTOCORE_MAX_POOL_CONNECTIONS:
        # ensure botocore has at least one connection per worker
        set_max_connections(options["num_workers"])
    if "date_range" in options:
        rng = options["date_range"]
        if rng is None:
            options.pop("date_range")
        else:
            if "-" not in rng:
                # a bare date bounds only the end of the range
                rng = (None, get_date(rng))
            else:
                rng = rng.split("-")
                if len(rng) != 2:
                    raise CommandError("bad date range: {}".format(rng))
                rng = tuple(get_date(v) for v in rng)
            # date_range is a tuple containing two date values
            # a value of None means that side of the range is unbounded
            options["date_range"] = rng
    if log_dir is None:
        summary_file = log_file = None
    else:
        now = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
        summary_file = os.path.join(
            log_dir, "{}-blob-migration-{}-summary.txt".format(slug, now))
        log_file = os.path.join(
            log_dir, "{}-blob-migration-{}.txt".format(slug, now))
        # do not overwrite logs left by a previous run
        assert not os.path.exists(summary_file), summary_file
        assert not os.path.exists(log_file), log_file
    with tee_output(summary_file):
        try:
            total, skips = migrator.migrate(log_file, **options)
            if skips:
                sys.exit(skips)
        except KeyboardInterrupt:
            print("stopped by operator")
            sys.exit(1)
def test_tee_output(self):
    """Normal output reaches both the fakes and the tee file; traceback file-only."""
    sink = StringIO()
    with assert_raises(Error), stdfake() as fake, tee_output(sink):
        print("testing...")
        sys.stderr.write("fail.\n")
        raise Error("stop")
    eq(fake.stdout.getvalue(), "testing...\n")
    eq(fake.stderr.getvalue(), "fail.\n")
    expected = (
        "testing...\n"
        "fail.\n"
        "Traceback (most recent call last):\n"
        " ...\n"
        "corehq.util.tests.test_teeout.Error: stop\n"
    )
    eq(sanitize_tb(sink.getvalue()), expected)
def test_tee_output():
    """stdout/stderr are mirrored to the tee file; the traceback goes there too."""
    sink = StringIO()
    with assert_raises(Error), stdfake() as fake, tee_output(sink):
        print("testing...")
        sys.stderr.write("fail.\n")
        raise Error("stop")
    eq(fake.stdout.getvalue(), "testing...\n")
    eq(fake.stderr.getvalue(), "fail.\n")
    # the exception is rendered fully qualified on PY3, bare on PY2
    error_name = "corehq.util.tests.test_teeout.Error" if six.PY3 else "Error"
    eq(
        sanitize_tb(sink.getvalue()),
        "testing...\n"
        "fail.\n"
        "Traceback (most recent call last):\n"
        " ...\n" + error_name + ": stop\n",
    )