def main(args):
  """Parses the command line, sets up cache and storage, and runs the task.

  Returns the exit code of the executed task, or 1 on internal failure.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage="%prog <options>",
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  # Where the .isolated to run comes from.
  source = optparse.OptionGroup(parser, "Data source")
  source.add_option(
      "-s", "--isolated", metavar="FILE",
      help="File/url describing what to map or run")
  source.add_option(
      "-H", "--hash",
      help="Hash of the .isolated to grab from the hash table")
  isolateserver.add_isolate_server_options(source, True)
  parser.add_option_group(source)

  # Local disk cache tuning knobs.
  caching = optparse.OptionGroup(parser, "Cache management")
  caching.add_option(
      "--cache", default="cache", metavar="DIR",
      help="Cache directory, default=%default")
  caching.add_option(
      "--max-cache-size", type="int", metavar="NNN",
      default=20 * 1024 * 1024 * 1024,
      help="Trim if the cache gets larger than this value, default=%default")
  caching.add_option(
      "--min-free-space", type="int", metavar="NNN",
      default=2 * 1024 * 1024 * 1024,
      help="Trim if disk free space becomes lower than this value, "
           "default=%default")
  caching.add_option(
      "--max-items", type="int", metavar="NNN", default=100000,
      help="Trim if more than this number of items are in the cache "
           "default=%default")
  parser.add_option_group(caching)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(source, options)

  # Exactly one of the two ways to name the task must be provided.
  if bool(options.isolated) == bool(options.hash):
    logging.debug("One and only one of --isolated or --hash is required.")
    parser.error("One and only one of --isolated or --hash is required.")

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged before bailing out.
    tools.report_error(e)
    logging.exception(e)
    return 1
def main(args):
  """Entry point: parses options, builds the cache and storage, runs the task.

  Returns the task's exit code, or 1 if something failed internally.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  # --- Data source options ---
  opts_src = optparse.OptionGroup(parser, 'Data source')
  opts_src.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  opts_src.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(opts_src, True)
  parser.add_option_group(opts_src)

  # --- Cache management options ---
  opts_cache = optparse.OptionGroup(parser, 'Cache management')
  opts_cache.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  opts_cache.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20 * 1024 * 1024 * 1024,
      help='Trim if the cache gets larger than this value, default=%default')
  opts_cache.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2 * 1024 * 1024 * 1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  opts_cache.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(opts_cache)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(opts_src, options)

  # The task must be named by exactly one of --isolated / --hash.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  hash_algorithm = isolateserver.get_hash_algo(options.namespace)
  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, hash_algorithm)
    server = options.isolate_server or options.indir
    with isolateserver.get_storage(server, options.namespace) as storage:
      return run_tha_test(
          options.isolated or options.hash, storage, cache, hash_algorithm,
          args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def hash_algo(self):  # pylint: disable=R0201
  # Returns the hash algorithm for the active namespace by delegating to
  # isolateserver.get_hash_algo().
  # NOTE(review): 'namespace' here is a free variable, not self.namespace and
  # not a parameter -- confirm a module/enclosing-scope 'namespace' binding
  # exists, otherwise this raises NameError at call time.
  return isolateserver.get_hash_algo(namespace)
# Make the project root and its bundled third_party libraries importable
# before pulling in the modules under test.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(TEST_DIR)
sys.path.insert(0, ROOT_DIR)
sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))

from depot_tools import auto_stub
import isolateserver
import test_utils
from utils import threading_utils

# Hash algorithm the tests below are written against.
ALGO = hashlib.sha1
# Tests here assume ALGO is used for default namespaces, check this assumption.
assert isolateserver.get_hash_algo('default') is ALGO
assert isolateserver.get_hash_algo('default-gzip') is ALGO


class TestCase(auto_stub.TestCase):
  """Mocks out url_open() calls and sys.stdout/stderr."""

  def setUp(self):
    """Installs network/auth/stdio mocks and resets per-test request state."""
    super(TestCase, self).setUp()
    # Skip real authentication; tests never talk to a live server.
    self.mock(isolateserver.auth, 'ensure_logged_in', lambda _: None)
    # Route all HTTP traffic through the class's stub (self._url_open is
    # presumably defined elsewhere in this class -- not visible here).
    self.mock(isolateserver.net, 'url_open', self._url_open)
    # Skip real sleeps between retries to keep tests fast.
    self.mock(isolateserver.net, 'sleep_before_retry', lambda *_: None)
    self._lock = threading.Lock()  # guards self._requests across threads
    self._requests = []  # request fixtures consumed by the url_open stub
    # Capture output so tests can assert on what was printed.
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.mock(sys, 'stderr', StringIO.StringIO())
import zlib

# Make the project root and its bundled third_party libraries importable
# before pulling in the modules under test.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(TEST_DIR)
sys.path.insert(0, ROOT_DIR)
sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))

from depot_tools import auto_stub
import isolateserver
import test_utils
from utils import threading_utils

# Hash algorithm the tests below are written against.
ALGO = hashlib.sha1
# Tests here assume ALGO is used for default namespaces, check this assumption.
assert isolateserver.get_hash_algo('default') is ALGO
assert isolateserver.get_hash_algo('default-gzip') is ALGO


class TestCase(auto_stub.TestCase):
  """Mocks out url_open() calls and sys.stdout/stderr."""

  def setUp(self):
    """Installs network/auth/stdio mocks and resets per-test request state."""
    super(TestCase, self).setUp()
    # Skip real authentication; tests never talk to a live server.
    self.mock(isolateserver.auth, 'ensure_logged_in', lambda _: None)
    # Route all HTTP traffic through the class's stub (self._url_open is
    # presumably defined elsewhere in this class -- not visible here).
    self.mock(isolateserver.net, 'url_open', self._url_open)
    # Skip real sleeps between retries to keep tests fast.
    self.mock(isolateserver.net, 'sleep_before_retry', lambda *_: None)
    self._lock = threading.Lock()  # guards self._requests across threads
    self._requests = []  # request fixtures consumed by the url_open stub
    # Capture output so tests can assert on what was printed.
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.mock(sys, 'stderr', StringIO.StringIO())
def main(args):
  """Command-line entry point.

  Builds the option parser, validates arguments, then creates the disk
  cache and remote storage and hands off to run_tha_test().  Returns the
  task's exit code, or 1 on internal failure.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  grp_data = optparse.OptionGroup(parser, 'Data source')
  grp_data.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  grp_data.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(grp_data, True)
  parser.add_option_group(grp_data)

  grp_cache = optparse.OptionGroup(parser, 'Cache management')
  grp_cache.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  grp_cache.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20 * 1024 * 1024 * 1024,
      help='Trim if the cache gets larger than this value, default=%default')
  grp_cache.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2 * 1024 * 1024 * 1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  grp_cache.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(grp_cache)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(grp_data, options)

  # Exactly one of --isolated / --hash must be set.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def main():
  """Parses sys.argv, validates flags, then runs the requested .isolated task.

  Returns the task's exit code, or 1 on internal failure.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_opts = optparse.OptionGroup(parser, 'Data source')
  data_opts.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  data_opts.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  data_opts.add_option(
      '-I', '--isolate-server', metavar='URL', default='',
      help='Isolate server to use')
  data_opts.add_option(
      '-n', '--namespace', default='default-gzip',
      help='namespace to use when using isolateserver, default: %default')
  parser.add_option_group(data_opts)

  cache_opts = optparse.OptionGroup(parser, 'Cache management')
  cache_opts.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  cache_opts.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20 * 1024 * 1024 * 1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_opts.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2 * 1024 * 1024 * 1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_opts.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_opts)

  options, args = parser.parse_args()

  # Exactly one of --isolated / --hash must name the task; positional
  # arguments are not accepted and a server is mandatory.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')
  if args:
    logging.debug('Unsupported args %s' % ' '.join(args))
    parser.error('Unsupported args %s' % ' '.join(args))
  if not options.isolate_server:
    parser.error('--isolate-server is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
  algo = isolateserver.get_hash_algo(options.namespace)
  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    outdir = make_temp_dir('run_tha_test', options.cache)
    return run_tha_test(
        options.isolated or options.hash, storage, cache, algo, outdir)
  except Exception as e:
    # Make sure any exception is logged.
    logging.exception(e)
    return 1
def main():
  """Entry point: reads flags from sys.argv and executes the named task.

  Returns the exit code from run_tha_test(), or 1 on internal failure.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  group_src = optparse.OptionGroup(parser, 'Data source')
  group_src.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  group_src.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  group_src.add_option(
      '-I', '--isolate-server', metavar='URL', default='',
      help='Isolate server to use')
  group_src.add_option(
      '-n', '--namespace', default='default-gzip',
      help='namespace to use when using isolateserver, default: %default')
  parser.add_option_group(group_src)

  group_cache = optparse.OptionGroup(parser, 'Cache management')
  group_cache.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  group_cache.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20 * 1024 * 1024 * 1024,
      help='Trim if the cache gets larger than this value, default=%default')
  group_cache.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2 * 1024 * 1024 * 1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  group_cache.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(group_cache)

  options, args = parser.parse_args()

  # Validate the flag combination before doing any work.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')
  if args:
    logging.debug('Unsupported args %s' % ' '.join(args))
    parser.error('Unsupported args %s' % ' '.join(args))
  if not options.isolate_server:
    parser.error('--isolate-server is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
  algo = isolateserver.get_hash_algo(options.namespace)
  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    outdir = make_temp_dir('run_tha_test', options.cache)
    return run_tha_test(
        options.isolated or options.hash, storage, cache, algo, outdir)
  except Exception as e:
    # Make sure any exception is logged.
    logging.exception(e)
    return 1