def test_remove_all():
    """Yield checks that removing a query key strips every occurrence of it."""
    cases = [
        # [source url, key to remove, expected url]
        ["http://example.net/?foo=bar", "foo", "http://example.net/"],
        ["http://example.net/?foo=", "foo", "http://example.net/"],
        ["http://example.net/?foo", "foo", "http://example.net/"],
        ["http://example.net/?foo=foo&foo=bar", "foo", "http://example.net/"],
    ]

    for source, key, expected in cases:
        yield check_remove_all, URL(source), key, URL(expected)
def test_path_append():
    """Yield checks for URL.path_append: relative segments (including '..' and
    trailing-empty segments) applied to a base URL must produce the predicted URL.

    Fix: the case ("http://example.net" + ['foo', 'bar', '']) appeared twice
    verbatim; the duplicate has been removed.
    """
    cases = [
        # [base url, predicted result, segments to append]
        ["http://example.net", "http://example.net", []],
        ["http://example.net", "http://example.net/", ['']],
        ["http://example.net", "http://example.net/foo", ['foo']],
        ["http://example.net", "http://example.net/foo/", ['foo', '']],
        ["http://example.net", "http://example.net/foo/bar", ['foo', 'bar']],
        ["http://example.net", "http://example.net/foo/bar/", ['foo', 'bar', '']],
        ["http://example.net/foo", "http://example.net/foo/bar/", ['bar', '']],
        ["http://example.net/foo/", "http://example.net/foo/bar/", ['bar', '']],
        ["http://example.net/foo/", "http://example.net/foo/bar", ['bar']],
        ["http://example.net/foo/", "http://example.net/foo/", ['']],
        # '..' walks up one level and never climbs above the root
        ["http://example.net/foo/", "http://example.net/", ['..']],
        ["http://example.net/foo", "http://example.net/", ['..']],
        ["http://example.net/", "http://example.net/", ['..']],
        ["http://example.net/", "http://example.net/", ['..', '..']],
        ["http://example.net/", "http://example.net/", ['..', '..', '..']],
        ["http://example.net/foo/bar/baz/", "http://example.net/", ['..', '..', '..']],
        ["http://example.net/foo/bar/baz", "http://example.net/", ['..', '..', '..']],
        ["http://example.net/foo/bar/baz/", "http://example.net/foo/", ['..', '..']],
        ["http://example.net/foo/bar/baz", "http://example.net/foo", ['..', '..']],
        ["http://example.net/foo/", "http://example.net/foo/", []],
    ]

    for url, predict, args in cases:
        yield check_path_append, URL(url), URL(predict), args
def check_url(url_string, url_test):
    """Assert that every field of the parsed URL matches the expected record.

    `url_test` is a namedtuple-like object (it exposes `_fields`); each of its
    attribute values is passed through `normalize` before comparison.
    """
    parsed = URL(url_string)

    for field in url_test._fields:
        actual = getattr(parsed, field)
        expected = normalize(getattr(url_test, field))
        assert actual == expected, "({} != {}) [{}] {} != {}".format(
            parsed, url_string, field, actual, expected)
def init_db(db_url):
    """Initialise the application database for the given connection URL.

    Looks up a driver factory in DB_ENGINES by URL scheme, creates the
    migrations table if missing, then runs pending migrations.

    Fix: corrected the fatal-log typo "fund" -> "find".
    """
    db_url = URL(db_url)

    driver_factory = DB_ENGINES.get(
        db_url.scheme,
        # Fallback only logs; it returns None, so an unsupported scheme will
        # still fail loudly at the unpacking below.
        lambda x: log.fatal("Unable to find database driver")
    )

    DB, migrator = driver_factory(db_url)
    DB.create_tables([Migrations], safe=True)
    migrate_db(DB, Migrations, migrator)
def test_win_path():
    """Yield checks that file:// URLs map to Windows filesystem paths.

    Covers both the 'c|' (RFC 1738 era) and 'c:' drive spellings, with and
    without an explicit 'localhost' authority.
    """
    cases = {
        ('file://localhost/c|/WINDOWS/clock.avi', "c:\\WINDOWS\\clock.avi"),
        ('file:///c|/WINDOWS/clock.avi', "c:\\WINDOWS\\clock.avi"),
        ('file://localhost/c:/WINDOWS/clock.avi', "c:\\WINDOWS\\clock.avi"),
        ('file:///c:/WINDOWS/clock.avi', "c:\\WINDOWS\\clock.avi"),
    }

    for source, expected in cases:
        yield check_win_path, URL(source), expected
def test_set():
    """Yield checks for URL query-setting with scalar and iterable values."""
    cases = [
        # [base url, (key, value), expected url]
        ["http://example.net/", ('foo', 'bar'), "http://example.net/?foo=bar"],
        ["http://example.net/", ('foo', (0, 1)), "http://example.net/?foo=0&foo=1"],
        ["http://example.net/", ('foo', ("0", "1")), "http://example.net/?foo=0&foo=1"],
        ["http://example.net/", ('foo', (0, "1")), "http://example.net/?foo=0&foo=1"],
    ]

    for base, arguments, expected in cases:
        yield check_set, URL(base), arguments, URL(expected)
def test_set_safe_symbols():
    """Yield checks that characters listed in `safe_symbols` stay unescaped.

    Fix: this function was previously also named `test_set`, redefining (and
    silently disabling) the earlier `test_set` generator above; renamed so
    both sets of cases actually run.
    """
    cases = (
        ("http://example.net/", ('foo', '(bar)'),
         "http://example.net/?foo=(bar)"),
        ("http://example.net/", ('foo', (0, "(1)")),
         "http://example.net/?foo=0&foo=(1)"),
    )

    for url, args, result in cases:
        # NOTE: the expected value is compared as a plain string here
        # (unlike test_set, which wraps it in URL).
        yield check_set, URL(url, safe_symbols="\\/:()", scheme='http'), args, result
def test_get():
    """Yield checks for reading query values: defaults, empty values and limits."""
    cases = [
        # [url, expected result, positional args, keyword args]
        ["http://example.net/?foo=bar", 'bar', ['foo'], {}],
        ["http://example.net/?foo=", '', ['foo'], {'default': None}],
        ["http://example.net/?foo", None, ['foo'], {}],
        ["http://example.net/?bar=foo", 'bar', ['foo'], {'default': 'bar'}],
        ["http://example.net/?bar=foo&bar=baz", ['baz', 'foo'], ['bar'],
         {'limit': None}],
        ["http://example.net/?bar=foo&bar=baz", ['foo', 'baz'], ['bar'],
         {'limit': 2}],
    ]

    for source, expected, args, kwargs in cases:
        yield check_get, URL(source), expected, args, kwargs
def test_pop():
    """Yield checks for popping query values, with and without defaults."""
    cases = [
        # [url, expected result, positional args, keyword args]
        ["http://example.net/?foo=bar", 'bar', ['foo'], {}],
        ["http://example.net/?foo=", '', ['foo'], {'default': None}],
        ["http://example.net/?foo", None, ['foo'], {}],
        ["http://example.net/?bar=foo", 'bar', ['foo'], {'default': 'bar'}],
    ]

    for source, expected, args, kwargs in cases:
        yield check_pop, URL(source), expected, args, kwargs
def test_iter():
    """Yield a check that unpacking a URL produces its components in order:
    scheme, user, password, host, port, path, query pairs, fragment."""
    source = "http://*****:*****@example.net:90/test?foo=bar#spam"
    expected = (
        'http', 'user', 'password', 'example.net', 90,
        '/test', (('foo', 'bar'),), 'spam'
    )

    yield check_iter, URL(source), expected
def release_data(cls, name, version):
    """Fetch PyPI release metadata and file list for one package version.

    Tornado-coroutine style: yields a list of two XML-RPC futures so both
    calls run concurrently, and delivers the result via ``raise Return``.

    If the index reports no files but the metadata carries a
    ``download_url``, the file is downloaded once to synthesize a single
    file record (filename, md5, size); any download failure is logged and
    degrades to an empty file list rather than propagating.
    """
    # Run both XML-RPC requests concurrently; name/version are coerced to
    # str because they are presumably project value objects — TODO confirm.
    info, files = yield [
        cls.XMLRPC.release_data(str(name), str(version)),
        cls.XMLRPC.release_urls(str(name), str(version))
    ]

    download_url = info.get('download_url')

    if download_url and not files:
        # No index-hosted files: fall back to the external download_url.
        try:
            url = URL(download_url)
            filename = url.path.split('/')[-1]

            # Strip a fragment (e.g. "#md5=...") from the filename.
            if "#" in filename:
                filename = filename.split("#")[0]

            response = yield cls.CLIENT.fetch(download_url)

            # Synthesize a file record shaped like the XML-RPC ones;
            # downloads=-1 marks the count as unknown.
            files = [{
                'filename': filename,
                'md5_digest': hashlib.md5(response.body).hexdigest(),
                'downloads': -1,
                'url': download_url,
                'size': len(response.body),
                'comment_text': None,
            }]
        except Exception as e:
            # Best-effort: a failed download leaves the release file-less.
            files = []
            log.error(
                "Error when trying to download version %s of package %s",
                version, name
            )
            log.exception(e)
    else:
        # Stable ordering for index-provided file lists.
        files = sorted(
            files,
            key=lambda x: x['filename']
        )

    raise Return((info, files))
def check_lt(url_a, url_b, expected_result):
    """Assert that comparing two URLs with ``<`` gives the expected result."""
    outcome = URL(url_a) < URL(url_b)
    assert outcome == expected_result, (
        "Unexpected result: %r < %r is not %s" % (url_a, url_b, expected_result)
    )
def check_path_append(base, predict, args):
    """Assert that appending `args` to `base`'s path yields `predict`."""
    url = URL(base)
    url.path_append(*args)
    assert url == predict, "%r is not %r" % (url, predict)
def check_ge(url_a, url_b, expected_result):
    """Assert that comparing two URLs with ``>=`` gives the expected result.

    Fix: the failure message previously printed '<=' although the comparison
    performed is '>='.
    """
    assert (URL(url_a) >= URL(url_b)) == expected_result, \
        "Unexpected result: %r >= %r is not %s" % (url_a, url_b, expected_result)
def test_init():
    """Yield checks of URL construction against empty/None inputs.

    Fix: the original yielded bare lambdas returning booleans; a yielded
    callable passes unless it *raises*, so the comparison results were
    discarded and these tests could never fail. Results are now routed
    through an asserting checker.
    """
    def check(result):
        # Fails the yielded test when the comparison was falsy.
        assert result

    yield check, URL("http://localhost/") != URL(None)
    yield check, URL("http://localhost/") != URL('')
    yield check, URL("http://localhost/") != URL()
    yield check, URL("http://localhost/") is not None
    yield check, URL("http://localhost/") != ''
def test_call():
    """Yield a call-check for every example URL."""
    for example in EXAMPLES:
        yield check_call, URL(example[0])
def test_eq_examples():
    """Yield an equality-check for every example URL."""
    for example in EXAMPLES:
        yield check_eq_examples, URL(example[0])
def test_str():
    """Yield a string-round-trip check for every example URL."""
    for example in EXAMPLES:
        yield check_str, URL(example[0])
def test_copy():
    """Yield a copy-check for every example URL."""
    for example in EXAMPLES:
        yield check_copy, URL(example[0])
def check_eq(url_a, url_b, expected_result):
    """Assert that comparing two URLs with ``==`` gives the expected result."""
    first, second = URL(url_a), URL(url_b)
    outcome = first == second
    assert outcome == expected_result, (
        "Unexpected result: %r == %r is not %s" % (first, second, expected_result)
    )
def run():
    """Application entry point: parse options, drop privileges, set up
    storage/signals/HTTP client/proxying, then start the IOLoop.

    Exits 0 on clean loop termination, 1 on any startup/main-loop error.

    Fixes:
      * ``os.setgid(uid)`` passed the uid where the gid is required — the
        looked-up gid was logged but never applied. Now ``os.setgid(gid)``.
        (setgid must still run before setuid, while we are still root.)
      * Log-message typo "Changind" -> "Changing".
    """
    options.parse_command_line()

    if options.config:
        options.parse_config_file(options.config)

    options.storage = os.path.abspath(options.storage)

    # Drop root privileges when a target user is configured.
    if os.getuid() == 0 and options.user:
        pw = pwd.getpwnam(options.user)
        uid, gid = pw.pw_uid, pw.pw_gid
        log.info("Changing user to %s [%s:%s]", options.user, uid, gid)
        os.setgid(gid)
        os.setuid(uid)

    try:
        if not all(f(options.storage) for f in (os.path.exists, os.path.isdir)):
            log.info(
                'Creating new package storage directory: "%s"',
                options.storage
            )
            os.makedirs(options.storage)

        def on_interrupt(*args):
            # Graceful shutdown on SIGINT/SIGTERM/SIGQUIT.
            log.warning("Receiving interrupt signal. Application will be stopped.")
            exit(errno.EINTR)

        log.debug("Preparing signal handling")
        for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGQUIT):
            signal.signal(sig, on_interrupt)

        def handle_pdb(sig, frame):
            # Debug hook: SIGUSR2 drops into pdb at the current frame.
            import pdb
            pdb.Pdb().set_trace(frame)

        if options.debug:
            signal.signal(signal.SIGUSR2, handle_pdb)

        log.debug("Creating application instance")
        app = create_app(
            options.debug,
            options.secret,
            options.gzip,
        )

        log.debug("Creating IOLoop instance.")
        io_loop = IOLoop.current()

        io_loop.run_sync(lambda: init_db(options.database))

        if not (os.path.exists(options.cache_dir) and
                os.path.isdir(options.cache_dir)):
            os.makedirs(options.cache_dir)

        Cache.CACHE_DIR = options.cache_dir

        log.info("Init thread pool with %d threads", options.pool_size)
        handlers.base.BaseHandler.THREAD_POOL = futures.ThreadPoolExecutor(
            options.pool_size
        )

        AsyncHTTPClient.configure(None, max_clients=options.max_http_clients)

        # Honour an "<scheme>_proxy" environment variable (e.g. https_proxy)
        # by switching to the curl-based client with proxy credentials.
        proxy_url = URL(os.getenv('{0}_proxy'.format(options.pypi_server.scheme)))
        if proxy_url:
            log.debug("Configuring for proxy: %s", proxy_url)
            AsyncHTTPClient.configure(
                'tornado.curl_httpclient.CurlAsyncHTTPClient',
                defaults={
                    'proxy_host': proxy_url.host,
                    'proxy_port': proxy_url.port,
                    'proxy_username': proxy_url.user,
                    'proxy_password': proxy_url.password,
                }
            )

        PYPIClient.configure(
            options.pypi_server,
            handlers.base.BaseHandler.THREAD_POOL
        )

        if options.pypi_proxy:
            # Refresh the package list hourly and once at startup.
            pypi_updater = PeriodicCallback(PYPIClient.packages, HOUR * 1000, io_loop)

            io_loop.add_callback(PYPIClient.packages)
            io_loop.add_callback(pypi_updater.start)

        log.info("Starting server http://%s:%d/", options.address, options.port)
        http_server = HTTPServer(app, xheaders=options.proxy_mode)
        http_server.listen(options.port, address=options.address)

        log.debug('Setting "%s" as storage', options.storage)
        PackageFile.set_storage(options.storage)

        log.debug("Starting main loop")
        io_loop.start()
    except Exception as e:
        log.fatal("Exception on main loop:")
        log.exception(e)
        exit(1)
    else:
        exit(0)
os.path.join(os.path.abspath(os.path.curdir), 'packages') ) ) define( "storage", help="Packages storage (default $CWD/packages) [ENV:STORAGE]", type=str, default=default_storage ) define( "database", help="Application database (default sqlite:///{storage}/metadata.db) [ENV:DB]", type=URL, default=os.getenv( "DB", URL( "sqlite://{0}".format("/".join( os.path.split(os.path.join(default_storage, 'metadata.db')) )) ) ) ) define("max_http_clients", help="Maximum HTTP Client instances for proxy requests (default 25) [ENV:MAX_CLIENTS]", default=int(os.getenv("MAX_CLIENTS", '25')), type=int) define("pypi_server", help="PYPI service url. Using for proxy. (default https://pypi.python.org/) [ENV:PYPY_SERVER]", default=URL(os.getenv("PYPI_SERVER", 'https://pypi.python.org/')), type=URL) default_cache_dir = os.path.join(tempfile.gettempdir(), 'pypi-server-cache') define(
help="Packages storage (default $CWD/packages) [ENV:STORAGE]", type=str, default=os.path.abspath( os.getenv("STORAGE", os.path.join(os.path.abspath(os.path.curdir), 'packages')))) define( "database", help= "Application database (default sqlite:///{storage}/metadata.db) [ENV:DB]", type=URL, default=os.getenv( "DB", URL("sqlite://{0}".format("/".join( os.path.split( os.path.join(os.path.abspath(os.path.curdir), 'packages', 'metadata.db'))))))) define( "max_http_clients", help= "Maximum HTTP Client instances for proxy requests (default 25) [ENV:MAX_CLIENTS]", default=int(os.getenv("MAX_CLIENTS", '25')), type=int) define( "pypi_server", help= "PYPI service url. Using for proxy. (default https://pypi.python.org/) [ENV:PYPY_SERVER]", default=URL(os.getenv("PYPI_SERVER", 'https://pypi.python.org/')), type=URL)