def test_parse_map_lua_perl(self):
    """Parse a config that mixes map, lua and perl_set directives (simplified-dict tree API)."""
    cfg = NginxConfigParser(map_lua_perl)
    cfg.parse()
    tree = cfg.simplify()

    # common structure
    assert_that(tree, has_key('http'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('map'))
    assert_that(http, has_key('perl_set'))

    # lua
    for server in http['server']:
        if server.get('server_name', '') == '127.0.0.1':
            assert_that(server, has_item('lua_shared_dict'))
            for location, data in server['location'].iteritems():
                if location == '= /some/':
                    assert_that(data, has_item('rewrite_by_lua'))

    # maps — note the map keys here keep their surrounding quotes for the second entry
    assert_that(http['map']['$http_user_agent $device'], has_key('~*Nexus\\ One|Nexus\\ S'))
    assert_that(http['map']['$http_referer $bad_referer'], has_key('"~* move-"'))
def test_parse_complex(self):
    """Parse a multi-file config (includes, upstreams, ifs) and spot-check the index tree."""
    cfg = NginxConfigParser(complex_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('upstream'))
    assert_that(http, has_key('include'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(11))

    # upstream
    upstream = http['upstream']
    assert_that(upstream, has_length(2))

    # ifs
    for server in http['server']:
        if server.get('listen', '') == '127.0.0.3:10122':
            assert_that(server, has_item('if'))

    # check index tree — each index entry is a (file_index, line_number) tuple
    x1_location_index = indexed_tree['http'][0]['server'][0][0]['location']['/'][1]
    x2_return_index = indexed_tree['http'][0]['server'][1][0]['location']['/'][0]['return'][1]
    assert_that(cfg.index[x1_location_index], equal_to((0, 8)))  # root file, line number 8
    assert_that(cfg.index[x2_return_index], equal_to((0, 9)))  # root file, line number 9
def test_parse_ssl(self):
    """
    This test case specifically checks to see that none of the excluded
    directives (SSL focused) are parsed.

    Uses the crossplane-style simplify() output (list of directive dicts).
    """
    cfg = NginxConfigParser(ssl_broken_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'http'})
    ))

    # http
    http = subtree[0]['block']
    assert_that(http, contains(
        has_entries({'directive': 'server'}),
        has_entries({'directive': 'server'})
    ))

    # ssl server
    ssl_server = http[1]['block']

    # check that ignored directives were actually ignored
    for directive in IGNORED_DIRECTIVES:
        assert_that(ssl_server, not_(has_item(has_entries({'directive': directive}))))

    # ssl_certificate itself must still be parsed (only keys/secrets are excluded)
    assert_that(ssl_server, has_item(
        has_entries({'directive': 'ssl_certificate', 'args': ['certs.d/example.cert']})
    ))
def test_sub_filter(self):
    """Check multiple sub_filter directives, including one with embedded newlines/quotes."""
    cfg = NginxConfigParser(sub_filter_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'http'})
    ))

    http = subtree[0]['block']
    assert_that(http, contains(
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'gzip_disable'}),
        has_entries({'directive': 'sub_filter', 'args': ['foo', 'bar']}),
        has_entries({'directive': 'sub_filter', 'args': ['https://foo.example.com/1', 'https://bar.example.com/1']}),
        has_entries({'directive': 'sub_filter', 'args': ['https://foo.example.com/2', 'https://bar.example.com/2']}),
        has_entries({'directive': 'sub_filter', 'args': ['https://foo.example.com/3', 'https://bar.example.com/3']}),
        has_entries({
            'directive': 'sub_filter',
            'args': [
                '</body>',
                '<p style="position: fixed;top:\n 60px;width:100%;;background-color: #f00;background-color:\n rgba(255,0,0,0.5);color: #000;text-align: center;font-weight:\n bold;padding: 0.5em;z-index: 1;">Test</p></body>'
            ]
        })
    ))
def test_proxy_pass(self):
    """Check that a proxy_pass value with ${var} interpolation survives parsing intact."""
    cfg = NginxConfigParser(proxy_pass_config)
    cfg.parse()
    tree = cfg.simplify()
    assert_that(tree['http']['proxy_pass'], equal_to('$scheme://${scheme}site.com_backend'))
def test_escaped_string(self):
    """Check that escaped quotes inside add_header values are unescaped correctly."""
    cfg = NginxConfigParser(escaped_string_config)
    cfg.parse()
    assert_that(cfg.errors, empty())
    subtree = cfg.simplify()

    assert_that(subtree, contains(
        has_entries({
            'directive': 'http',
            'block': contains(
                has_entries({
                    'directive': 'server',
                    'block': contains(
                        has_entries({
                            'directive': 'add_header',
                            'args': ['LinkOne', '<https://$http_host$request_uri>; rel="foo"'],
                        }),
                        has_entries({
                            'directive': 'add_header',
                            'args': ['LinkTwo', "<https://$http_host$request_uri>; rel='bar'"],
                        })
                    )
                })
            )
        })
    ))
def test_proxy_pass(self):
    """Check proxy_pass with ${var} interpolation via the crossplane-style tree."""
    cfg = NginxConfigParser(proxy_pass_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'http'})
    ))

    http = subtree[0]['block']
    assert_that(http, contains(
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'gzip_disable'}),
        has_entries({'directive': 'server'})
    ))

    server = http[2]['block']
    assert_that(server, contains(
        has_entries({'directive': 'location', 'args': ['/']})
    ))

    location = server[0]['block']
    assert_that(location, contains(
        has_entries({'directive': 'proxy_pass', 'args': ['$scheme://${scheme}site.com_backend']})
    ))
def test_parse_map_lua_perl(self):
    """Parse a config mixing map, lua and perl_set directives (map keys without quotes)."""
    cfg = NginxConfigParser(map_lua_perl)
    cfg.parse()
    tree = cfg.simplify()

    # common structure
    assert_that(tree, has_key('http'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('map'))
    assert_that(http, has_key('perl_set'))

    # lua
    for server in http['server']:
        if server.get('server_name', '') == '127.0.0.1':
            assert_that(server, has_item('lua_shared_dict'))
            for location, data in server['location'].iteritems():
                if location == '= /some/':
                    assert_that(data, has_item('rewrite_by_lua'))

    # maps — here the second map key has its surrounding quotes stripped
    assert_that(http['map']['$http_user_agent $device'], has_key('~*Nexus\\ One|Nexus\\ S'))
    assert_that(http['map']['$http_referer $bad_referer'], has_key('~* move-'))
def test_parse_ssl_simple_config(self):
    """Check that ignored SSL directives are dropped but ssl_certificate is kept and collected."""
    cfg = NginxConfigParser(ssl_simple_config)
    cfg.parse()
    tree = cfg.simplify()

    assert_that(tree, has_key('http'))
    http = tree['http']
    assert_that(http, has_key('server'))
    server = http['server']

    # ssl
    for server_dict in server:
        # check that all server dicts don't have ignored directives
        for directive in IGNORED_DIRECTIVES:
            assert_that(server_dict, is_not(has_item(directive)))

        # for specifically the ssl server block, check ssl settings
        if server_dict.get('server_name') == 'example.com' and 'if' not in server_dict:
            assert_that(server_dict, has_item('ssl_certificate'))
            assert_that(server_dict['ssl_certificate'], equal_to('certs.d/example.com.crt'))

    # the parser should also collect the certificate file itself
    ssl_certificates = cfg.ssl_certificates
    assert_that(len(ssl_certificates), equal_to(1))
def test_parse_huge(self):
    """Parse a large fixture config; map values keep their quotes in this variant."""
    cfg = NginxConfigParser(huge_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('include'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(8))

    # map
    http_map = http['map']
    assert_that(http_map, equal_to({'$dirname $diruri': {'default': '"dirindex.html"', 'include': ['"dir.map"']}}))

    # check index tree — index entries are (file_index, line_number) tuples
    books_location_index = indexed_tree['http'][0]['server'][2][0]['location']['/books/'][1]
    assert_that(cfg.index[books_location_index], equal_to((0, 134)))  # root file, line number 134
def test_parse_huge(self):
    """Parse a large fixture config; map values are unquoted and the directory map is checked."""
    cfg = NginxConfigParser(huge_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('include'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(8))

    # map
    http_map = http['map']
    assert_that(http_map, equal_to({'$dirname $diruri': {'default': 'dirindex.html', 'include': ['dir.map']}}))

    # check index tree
    books_location_index = indexed_tree['http'][0]['server'][2][0]['location']['/books/'][1]
    assert_that(cfg.index[books_location_index], equal_to((0, 135)))  # root file, line number 135

    # check directory map
    assert_that(cfg.directory_map, has_key('/amplify/test/fixtures/nginx/huge/'))
    for key in ('info', 'files'):
        assert_that(cfg.directory_map['/amplify/test/fixtures/nginx/huge/'], has_key(key))

    files = cfg.directory_map['/amplify/test/fixtures/nginx/huge/']['files']
    assert_that(files, has_length(7))
def test_parse_simple(self):
    """Parse the simple fixture config and verify both the simplified and indexed trees.

    Covers: tree structure, server/location nesting, included mime types,
    add_header values, and (file_index, line_number) index entries.
    """
    cfg = NginxConfigParser(simple_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('types'))
    assert_that(http, has_key('include'))
    assert_that(http, has_key('add_header'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(2))

    # server
    server = http['server'][1]
    assert_that(server, has_key('listen'))
    assert_that(server, has_key('location'))
    assert_that(server, has_key('server_name'))
    assert_that(
        server['server_name'],
        equal_to('127.0.0.1 "~^([a-z]{2})?\.?test\.nginx\.org" "~^([a-z]{2})?\.?beta\.nginx\.org"')
    )
    assert_that(server['location'], is_(instance_of(dict)))

    # location
    location = server['location']
    assert_that(location, has_key('/basic_status'))

    # nested location
    assert_that(http['server'][0]['location']['/'], has_key('location'))

    # included mimes
    mimes = http['types']
    assert_that(mimes, has_key('application/java-archive'))

    # add_header
    add_header = http['add_header']
    assert_that(add_header, contains_string('"max-age=31536000; includeSubdomains; ;preload"'))

    # check index tree
    worker_connections_index = indexed_tree['events'][0]['worker_connections'][1]
    basic_status_index = indexed_tree['http'][0]['server'][1][0]['location']['/basic_status'][1]
    stub_status_in_basic_index = indexed_tree['http'][0]['server'][1][0]['location']['/basic_status'][0]['stub_status'][1]
    plus_status_in_basic_index = indexed_tree['http'][0]['server'][1][0]['location']['/plus_status'][0]['status'][1]
    rewrite_in_basic_index = indexed_tree['http'][0]['server'][1][0]['rewrite'][1]
    proxy_pass_index = indexed_tree['http'][0]['server'][0][0]['location']['/'][0]['proxy_pass'][1]

    assert_that(cfg.index[worker_connections_index], equal_to((0, 6)))  # root file, line number 6
    assert_that(cfg.index[basic_status_index], equal_to((0, 67)))  # root file, line number 67
    assert_that(cfg.index[stub_status_in_basic_index], equal_to((0, 69)))  # root file, line number 69
    assert_that(cfg.index[plus_status_in_basic_index], equal_to((0, 72)))  # root file, line number 72
    # BUG FIX: the closing parenthesis previously ended the assert_that call before
    # equal_to() — `assert_that(x), equal_to(...)` — so the matcher was never applied
    # and the rewrite index was not actually verified.
    assert_that(cfg.index[rewrite_in_basic_index], equal_to((0, 75)))  # root file, line number 75
    assert_that(cfg.index[proxy_pass_index], equal_to((2, 13)))  # third loaded file, line number 13
def test_log_format_unicode_quote(self):
    """Check that a log_format containing a UTF-8 right double quote (\\xe2\\x80\\x9d) parses."""
    cfg = NginxConfigParser(log_format_unicode_quote)
    cfg.parse()
    tree = cfg.simplify()
    format = tree['http']['log_format']['foo']
    assert_that(
        format, equal_to(
            'site="$server_name" server="$host\xe2\x80\x9d uri="uri"'))
def test_parse_ssl_simple_config(self):
    """Walk the crossplane-style tree of the ssl fixture; ignored SSL directives must be absent."""
    cfg = NginxConfigParser(ssl_simple_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'user'}),
        has_entries({'directive': 'worker_processes'}),
        has_entries({'directive': 'pid'}),
        has_entries({'directive': 'events'}),
        has_entries({'directive': 'http'})
    ))

    http = subtree[4]['block']
    assert_that(http, contains(
        has_entries({'directive': 'sendfile'}),
        has_entries({'directive': 'tcp_nopush'}),
        has_entries({'directive': 'tcp_nodelay'}),
        has_entries({'directive': 'keepalive_timeout'}),
        has_entries({'directive': 'types_hash_max_size'}),
        has_entries({'directive': 'include', 'args': ['mime.types']}),
        has_entries({'directive': 'types', 'block': has_length(70)}),
        has_entries({'directive': 'default_type'}),
        has_entries({'directive': 'proxy_buffering'}),
        has_entries({'directive': 'log_format'}),
        has_entries({'directive': 'access_log'}),
        has_entries({'directive': 'error_log'}),
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'gzip_disable'}),
        has_entries({'directive': 'include', 'args': ['conf.d/*.conf']}),
        # from conf.d/something.conf
        has_entries({'directive': 'upstream'}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['4000']}))}),
        # from conf.d/ssl.conf
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['80']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['443', 'ssl']}))}),
        # back in nginx.conf
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['81', 'default_server']}))}),
    ))

    # check that ignored directives were actually ignored in all servers
    for server_directive in http[16:20]:
        server = server_directive['block']
        for directive in IGNORED_DIRECTIVES:
            assert_that(server, not_(has_item(has_entries({'directive': directive}))))

    # check ssl settings for specifically the ssl server block
    ssl_server = http[18]['block']
    assert_that(ssl_server, has_items(
        has_entries({'directive': 'server_name', 'args': ['example.com']}),
        has_entries({'directive': 'ssl_certificate', 'args': ['certs.d/example.com.crt']})
    ))
    assert_that(cfg.ssl_certificates, has_length(1))
def test_sub_filter(self):
    """Check a sub_filter whose value keeps its single quotes and embedded newlines."""
    cfg = NginxConfigParser(sub_filter_config)
    cfg.parse()
    tree = cfg.simplify()
    assert_that(
        tree['http']['sub_filter'],
        equal_to(
            '\'</body>\'\'<p style="position: fixed;top:\n 60px;width:100%;;background-color: #f00;background-color:\n rgba(255,0,0,0.5);color: #000;text-align: center;font-weight:\n bold;padding: 0.5em;z-index: 1;">Test</p></body>\''
        ))
def test_parse_huge(self):
    """Parse the huge fixture and verify the exact directory map (including missing-file errors)."""
    cfg = NginxConfigParser(huge_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('include'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(8))

    # map
    http_map = http['map']
    assert_that(http_map, equal_to({'$dirname $diruri': {'default': '"dirindex.html"', 'include': ['"dir.map"']}}))

    # check index tree
    books_location_index = indexed_tree['http'][0]['server'][2][0]['location']['/books/'][1]
    assert_that(cfg.index[books_location_index], equal_to((0, 135)))  # root file, line number 135

    # check directory map — included files that don't exist are recorded with an 'error' entry
    assert_that(cfg.directory_map, equal_to({
        '/amplify/test/fixtures/nginx/huge/': {
            'info': {'permissions': '0755', 'mtime': 1463763109, 'size': 136},
            'files': {
                '/amplify/test/fixtures/nginx/huge/mime.types': {
                    'info': {'index': 1, 'permissions': '0644', 'mtime': 1463763109, 'lines': 77, 'size': 3870}
                },
                '/amplify/test/fixtures/nginx/huge/gce-public-networks.conf': {
                    'error': 'IOError: No such file or directory'
                },
                '/amplify/test/fixtures/nginx/huge/ec2-public-networks.conf': {
                    'error': 'IOError: No such file or directory'
                },
                '/amplify/test/fixtures/nginx/huge/azure-public-networks.conf': {
                    'error': 'IOError: No such file or directory'
                },
                '/amplify/test/fixtures/nginx/huge/mime.types2': {
                    'error': 'IOError: No such file or directory'
                },
                '/amplify/test/fixtures/nginx/huge/nginx.conf': {
                    'info': {'index': 0, 'permissions': '0644', 'mtime': 1463763109, 'lines': 364, 'size': 8892}
                },
                '/amplify/test/fixtures/nginx/huge/dir.map': {
                    'error': 'IOError: No such file or directory'
                }
            }
        }
    }))
def test_parse_simple(self):
    """Parse the simple fixture config and verify the simplified and indexed trees."""
    cfg = NginxConfigParser(simple_config)
    cfg.parse()
    tree = cfg.simplify()
    indexed_tree = cfg.tree

    # common structure
    assert_that(tree, has_key('http'))
    assert_that(tree, has_key('events'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))
    assert_that(http, has_key('types'))
    assert_that(http, has_key('include'))
    assert_that(http, has_key('add_header'))
    assert_that(http['server'], is_(instance_of(list)))
    assert_that(http['server'], has_length(2))

    # server
    server = http['server'][1]
    assert_that(server, has_key('listen'))
    assert_that(server, has_key('location'))
    assert_that(server, has_key('server_name'))
    assert_that(
        server['server_name'],
        equal_to('127.0.0.1 "~^([a-z]{2})?\.?test\.nginx\.org" "~^([a-z]{2})?\.?beta\.nginx\.org"')
    )
    assert_that(server['location'], is_(instance_of(dict)))

    # location
    location = server['location']
    assert_that(location, has_key('/basic_status'))

    # nested location
    assert_that(http['server'][0]['location']['/'], has_key('location'))

    # included mimes
    mimes = http['types']
    assert_that(mimes, has_key('application/java-archive'))

    # add_header
    add_header = http['add_header']
    assert_that(add_header, contains_string('"max-age=31536000; includeSubdomains; ;preload"'))

    # check index tree — each index entry is a (file_index, line_number) tuple
    worker_connections_index = indexed_tree['events'][0]['worker_connections'][1]
    basic_status_index = indexed_tree['http'][0]['server'][1][0]['location']['/basic_status'][1]
    stub_status_in_basic_index = indexed_tree['http'][0]['server'][1][0]['location']['/basic_status'][0]['stub_status'][1]
    proxy_pass_index = indexed_tree['http'][0]['server'][0][0]['location']['/'][0]['proxy_pass'][1]

    assert_that(cfg.index[worker_connections_index], equal_to((0, 6)))  # root file, line number 6
    assert_that(cfg.index[basic_status_index], equal_to((0, 67)))  # root file, line number 67
    assert_that(cfg.index[stub_status_in_basic_index], equal_to((0, 69)))  # root file, line number 69
    assert_that(cfg.index[proxy_pass_index], equal_to((2, 13)))  # third loaded file, line number 13
def test_sub_filter(self):
    """Check a sub_filter whose value keeps its single quotes and embedded newlines."""
    cfg = NginxConfigParser(sub_filter_config)
    cfg.parse()
    tree = cfg.simplify()
    assert_that(
        tree['http']['sub_filter'],
        equal_to(
            '\'</body>\'\'<p style="position: fixed;top:\n 60px;width:100%;;background-color: #f00;background-color:\n rgba(255,0,0,0.5);color: #000;text-align: center;font-weight:\n bold;padding: 0.5em;z-index: 1;">Test</p></body>\''
        )
    )
def test_proxy_pass(self):
    """Check proxy_pass with ${var} interpolation inside a server/location subtree."""
    cfg = NginxConfigParser(proxy_pass_config)
    cfg.parse()
    tree = cfg.simplify()
    http = tree['http']
    server = http['server'][0]
    location = server['location']['/']
    assert_that(location['proxy_pass'], equal_to('$scheme://${scheme}site.com_backend'))
def test_parse_json(self):
    """
    Test json config format.
    This is the first test investigating Parser auto-escape problems.
    """
    cfg = NginxConfigParser(json_config)
    cfg.parse()
    tree = cfg.simplify()
    # no backslash escapes should leak into the parsed log_format values
    for log_format in tree['http']['log_format'].itervalues():
        assert_that(log_format.find('\\'), equal_to(-1))
def test_parse_bad_access_and_error_log(self):
    """
    Test case for ignoring access_log and error_log edge cases.
    """
    cfg = NginxConfigParser(bad_log_directives_config)
    cfg.parse()
    tree = cfg.simplify()
    # malformed log directives must be dropped entirely from the tree
    assert_that(tree, not_(has_key('access_log')))
    assert_that(tree, not_(has_key('error_log')))
def test_parse_bad_access_and_error_log(self):
    """
    Test case for ignoring access_log and error_log edge cases.
    """
    cfg = NginxConfigParser(bad_log_directives_config)
    cfg.parse()
    tree = cfg.simplify()
    # BUG FIX: `not has_key(...)` applied Python's `not` to the (always truthy)
    # matcher object, passing a plain False to assert_that — which hamcrest then
    # treats as a boolean assertion on the first argument, so the check was
    # vacuous.  Use the hamcrest not_() matcher to actually negate the match.
    assert_that(tree, not_(has_key('access_log')))
    assert_that(tree, not_(has_key('error_log')))
def test_escaped_string(self):
    """Check that escaped quotes inside add_header values are preserved verbatim."""
    cfg = NginxConfigParser(escaped_string_config)
    cfg.parse()
    assert_that(cfg.errors, empty())
    tree = cfg.simplify()
    add_header = tree['http']['server'][0]['add_header']
    assert_that(add_header, contains(
        r'LinkOne "<https://$http_host$request_uri>; rel=\"foo\""',
        r"LinkTwo '<https://$http_host$request_uri>; rel=\'bar\''"
    ))
def test_parse_windows(self):
    """
    Test that windows style line endings are replaces with Unix style ones for parser.
    """
    cfg = NginxConfigParser(windows_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'user'}),
        has_entries({'directive': 'worker_processes'}),
        has_entries({'directive': 'worker_rlimit_nofile'}),
        has_entries({'directive': 'events'}),
        has_entries({'directive': 'error_log'}),
        has_entries({'directive': 'pid'}),
        has_entries({'directive': 'http'})
    ))

    http = subtree[6]['block']
    assert_that(http, contains(
        has_entries({'directive': 'server_tokens'}),
        has_entries({'directive': 'include', 'args': ['mime.types']}),
        has_entries({'directive': 'default_type'}),
        has_entries({'directive': 'charset_types'}),
        has_entries({'directive': 'log_format'}),
        has_entries({'directive': 'access_log'}),
        has_entries({'directive': 'keepalive_timeout'}),
        has_entries({'directive': 'sendfile'}),
        has_entries({'directive': 'tcp_nopush'}),
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'gzip_comp_level'}),
        has_entries({'directive': 'gzip_min_length'}),
        has_entries({'directive': 'gzip_proxied'}),
        has_entries({'directive': 'gzip_vary'}),
        has_entries({
            'directive': 'gzip_types',
            'args': contains(
                'application/atom+xml', 'application/javascript', 'application/json',
                'application/ld+json', 'application/manifest+json', 'application/rss+xml',
                'application/vnd.geo+json', 'application/vnd.ms-fontobject',
                'application/x-font-ttf', 'application/x-web-app-manifest+json',
                'application/xhtml+xml', 'application/xml', 'font/opentype', 'image/bmp',
                'image/svg+xml', 'image/x-icon', 'text/cache-manifest', 'text/css',
                'text/plain', 'text/vcard', 'text/vnd.rim.location.xloc', 'text/vtt',
                'text/x-component', 'text/x-cross-domain-policy'
            )
        }),
        has_entries({'directive': 'include', 'args': ['sites-enabled/*']}),
    ))
def test_sub_filter(self):
    """Check that repeated sub_filter directives collapse into an ordered list of values."""
    cfg = NginxConfigParser(sub_filter_config)
    cfg.parse()
    tree = cfg.simplify()
    assert_that(
        tree['http']['sub_filter'],
        contains(
            'foo bar',
            'https://foo.example.com/1 https://bar.example.com/1',
            'https://foo.example.com/2 https://bar.example.com/2',
            'https://foo.example.com/3 https://bar.example.com/3',
            '</body> \'<p style="position: fixed;top:\n 60px;width:100%;;background-color: #f00;background-color:\n rgba(255,0,0,0.5);color: #000;text-align: center;font-weight:\n bold;padding: 0.5em;z-index: 1;">Test</p></body>\''
        ))
def test_parse_ssl(self):
    """
    This test case specifically checks to see that none of the excluded
    directives (SSL focused) are parsed.
    """
    cfg = NginxConfigParser(ssl_broken_config)
    cfg.parse()
    tree = cfg.simplify()
    assert_that(tree, has_key('server'))

    # ssl — ignored directives must be absent, but ssl_certificate itself is kept
    for directive in IGNORED_DIRECTIVES:
        assert_that(tree['server'][1], is_not(has_item(directive)))

    assert_that(tree['server'][1], has_item('ssl_certificate'))
    assert_that(tree['server'][1]['ssl_certificate'], equal_to('certs.d/example.cert'))
def test_parse_ssl_not_ignored(self):
    """
    This test case specifically checks to see that excluded directives
    (SSL focused) are parsed for controller agent
    """
    cfg = NginxConfigParser(ssl_broken_config)
    cfg.parse()
    subtree = cfg.simplify()

    http = subtree[0]['block']
    ssl_server = http[1]['block']

    # check that ignored directives were not ignored
    # ssl_certificate_key is one of the IGNORED_DIRECTIVE
    assert_that(ssl_server, has_item(
        has_entries({'directive': 'ssl_certificate_key'}),
    ))
def test_parse_rewrites(self):
    """Check that rewrite directives inside an included server block are parsed."""
    cfg = NginxConfigParser(rewrites_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'user'}),
        has_entries({'directive': 'worker_processes'}),
        has_entries({'directive': 'worker_rlimit_nofile'}),
        has_entries({'directive': 'error_log'}),
        has_entries({'directive': 'pid'}),
        has_entries({'directive': 'events'}),
        has_entries({'directive': 'http'})
    ))

    # http
    http = subtree[6]['block']
    assert_that(http, contains(
        has_entries({'directive': 'include', 'args': ['mime.types']}),
        has_entries({'directive': 'default_type'}),
        has_entries({'directive': 'access_log'}),
        has_entries({'directive': 'proxy_cache_path'}),
        has_entries({'directive': 'fastcgi_cache_path'}),
        has_entries({'directive': 'sendfile'}),
        has_entries({'directive': 'keepalive_timeout'}),
        has_entries({'directive': 'tcp_nodelay'}),
        has_entries({'directive': 'fastcgi_buffers'}),
        has_entries({'directive': 'fastcgi_buffering'}),
        has_entries({'directive': 'fastcgi_buffer_size'}),
        has_entries({'directive': 'proxy_buffers'}),
        has_entries({'directive': 'proxy_buffer_size'}),
        has_entries({'directive': 'upstream'}),
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'log_format'}),
        has_entries({'directive': 'include', 'args': ['sites-enabled/*.conf']}),
        has_entries({
            'directive': 'server',
            'block': has_items(
                has_entries({'directive': 'server_name', 'args': ['mb.some.org', 'localhost', 'melchior', 'melchior.some.org']}),
                has_entries({'directive': 'include', 'args': ['sites-enabled/rewrites']}),
                has_entries({'directive': 'rewrite'})
            )
        })
    ))
def test_parse_rewrites(self):
    """Check that rewrite directives are attached to the matching server block."""
    cfg = NginxConfigParser(rewrites_config)
    cfg.parse()
    tree = cfg.simplify()

    # common structure
    assert_that(tree, has_key('http'))

    # http
    http = tree['http']
    assert_that(http, has_key('server'))

    # rewrites
    for server in http['server']:
        if server.get('server_name', '') == 'mb.some.org localhost melchior melchior.some.org':
            assert_that(server, has_item('rewrite'))
def test_parse_ssl_simple_config_not_ignored(self):
    """Check that SSL key/trust directives ARE parsed when exclusions are disabled."""
    cfg = NginxConfigParser(ssl_simple_config)
    cfg.parse()
    subtree = cfg.simplify()

    http = subtree[4]['block']

    # check that ignored directives were not ignored
    # ssl_certificate_key, ssl_trusted_certificate are some of the the IGNORED_DIRECTIVE
    # check ssl settings for specifically the ssl server block
    ssl_server = http[18]['block']
    assert_that(ssl_server, has_items(
        has_entries({'directive': 'ssl_certificate_key'}),
        has_entries({'directive': 'ssl_trusted_certificate'})
    ))
    assert_that(cfg.ssl_certificates, has_length(1))
def test_parse_complex(self):
    """Check the full crossplane-style tree of a multi-include config, in include order."""
    cfg = NginxConfigParser(complex_config)
    cfg.parse()
    tree = cfg.simplify()

    # common structure
    assert_that(tree, contains(
        has_entries({'directive': 'worker_processes'}),
        has_entries({'directive': 'daemon'}),
        has_entries({'directive': 'events'}),
        has_entries({'directive': 'http'}),
    ))

    # http
    http = tree[3]['block']
    assert_that(http, contains(
        # main config file
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.1.0.1']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.2.0.1:10122']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.3.0.1:10122']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.4.0.1:10122']}))}),
        has_entries({'directive': 'upstream'}),
        has_entries({'directive': 'resolver'}),
        # part.conf
        has_entries({'directive': 'include', 'args': ['part.conf']}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.0.0.1:1234']}))}),
        # part2.conf
        has_entries({'directive': 'include', 'args': ['part2.conf']}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.6.1.1:10122']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.7.2.1:10122']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.8.3.1:10122']}))}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.9.4.1:10122']}))}),
        # back to the main config
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.0.0.1:10122']}))}),
        has_entries({'directive': 'types'}),
        has_entries({'directive': 'upstream'}),
        has_entries({'directive': 'server', 'block': has_item(has_entries({'directive': 'listen', 'args': ['127.0.0.3:10122']}))}),
        has_entries({'directive': 'map'}),
        has_entries({'directive': 'upstream'}),
    ))
def test_parse_ssl_simple_config(self):
    """Check the ssl server block (index 2): ignored directives dropped, cert kept and collected."""
    cfg = NginxConfigParser(ssl_simple_config)
    cfg.parse()
    tree = cfg.simplify()

    assert_that(tree, has_key('http'))
    http = tree['http']
    assert_that(http, has_key('server'))
    server = http['server']

    # ssl
    for directive in IGNORED_DIRECTIVES:
        assert_that(server[2], is_not(has_item(directive)))

    assert_that(server[2], has_item('ssl_certificate'))
    assert_that(server[2]['ssl_certificate'], equal_to('certs.d/example.com.crt'))

    ssl_certificates = cfg.ssl_certificates
    assert_that(len(ssl_certificates), equal_to(1))
def test_parse_windows(self):
    """
    Test that windows style line endings are replaces with Unix style ones for parser.
    """
    cfg = NginxConfigParser(windows_config)
    cfg.parse()
    tree = cfg.simplify()
    # gzip_types should collapse to a single space-separated value string
    assert_that(
        tree['http']['gzip_types'],
        equal_to(
            'application/atom+xml application/javascript application/json application/ld+json '
            'application/manifest+json application/rss+xml application/vnd.geo+json '
            'application/vnd.ms-fontobject application/x-font-ttf application/x-web-app-manifest+json '
            'application/xhtml+xml application/xml font/opentype image/bmp '
            'image/svg+xml image/x-icon text/cache-manifest text/css text/plain '
            'text/vcard text/vnd.rim.location.xloc text/vtt text/x-component '
            'text/x-cross-domain-policy'))
def test_parse_windows(self):
    """
    Test that windows style line endings are replaces with Unix style ones for parser.
    """
    cfg = NginxConfigParser(windows_config)
    cfg.parse()
    tree = cfg.simplify()
    # in this variant the original newlines are preserved inside the value
    assert_that(
        tree['http']['gzip_types'],
        equal_to(
            'application/atom+xml\n  application/javascript\n  application/json\n  application/ld+json\n' \
            '  application/manifest+json\n  application/rss+xml\n  application/vnd.geo+json\n  ' \
            'application/vnd.ms-fontobject\n  application/x-font-ttf\n  application/x-web-app-manifest+json\n'\
            '  application/xhtml+xml\n  application/xml\n  font/opentype\n  image/bmp\n  ' \
            'image/svg+xml\n  image/x-icon\n  text/cache-manifest\n  text/css\n  text/plain\n  ' \
            'text/vcard\n  text/vnd.rim.location.xloc\n  text/vtt\n  text/x-component\n  ' \
            ' text/x-cross-domain-policy'
        )
    )
def test_log_format_string_concat(self):
    """Check that multi-part log_format strings concatenate identically with or without newlines."""
    cfg = NginxConfigParser(log_format_string_concat)
    cfg.parse()
    tree = cfg.simplify()
    formats = tree['http']['log_format']
    # both fixture formats must normalise to this single-line value
    expected = (
        '$remote_addr - $remote_user [$time_local] "$request" '
        '$status $body_bytes_sent "$http_referer" '
        '"$http_user_agent" "$http_x_forwarded_for" '
        '"$host" sn="$server_name" '
        'rt=$request_time '
        'ua="$upstream_addr" us="$upstream_status" '
        'ut="$upstream_response_time" ul="$upstream_response_length" '
        'cs=$upstream_cache_status')

    assert_that(formats, has_length(2))
    assert_that(formats, has_items('with_newlines', 'without_newlines'))
    assert_that(formats['with_newlines'], equal_to(expected))
    assert_that(formats['without_newlines'], equal_to(expected))
def test_parse_json(self):
    """
    Test json config format.
    This is the first test investigating Parser auto-escape problems.
    """
    cfg = NginxConfigParser(json_config)
    cfg.parse()
    subtree = cfg.simplify()

    # common structure
    assert_that(subtree, contains(
        has_entries({'directive': 'user'}),
        has_entries({'directive': 'worker_processes'}),
        has_entries({'directive': 'error_log'}),
        has_entries({'directive': 'pid'}),
        has_entries({'directive': 'events'}),
        has_entries({'directive': 'http'})
    ))

    http = subtree[5]['block']
    assert_that(http, contains(
        has_entries({'directive': 'charset'}),
        has_entries({'directive': 'log_format'}),
        has_entries({'directive': 'access_log'}),
        has_entries({'directive': 'proxy_cache_path'}),
        has_entries({'directive': 'sendfile'}),
        has_entries({'directive': 'keepalive_timeout'}),
        has_entries({'directive': 'gzip'}),
        has_entries({'directive': 'gzip_types'}),
        has_entries({'directive': 'root'}),
        has_entries({'directive': 'server'}),
        has_entries({'directive': 'server'}),
        has_entries({'directive': 'upstream'}),
        has_entries({'directive': 'server'}),
        has_entries({'directive': 'server'}),
    ))

    # no backslash escapes should leak into the parsed json log_format value
    log_format_args = http[1]['args']
    assert_that(log_format_args[0], equal_to('json'))
    assert_that(log_format_args[1].find('\\'), equal_to(-1))
class NginxConfig(object):
    """
    Nginx config representation.
    Parses configs with all includes, etc.

    Main tasks:
    - find all log formats
    - find all access logs
    - find all error logs
    - find stub_status url
    """

    def __init__(self, filename, binary=None, prefix=None):
        # :param filename: str - path to the root nginx config file
        # :param binary: str - path to the nginx binary (used by run_test), may be None
        # :param prefix: str - nginx --prefix path, used to resolve relative log paths
        self.filename = filename
        self.binary = binary
        self.prefix = prefix
        self.log_formats = {}           # {format_name: format_string}
        self.access_logs = {}           # {log_path: format_name or None}
        self.error_logs = {}            # {log_path: log_level}
        self.test_errors = []           # syntax errors collected by run_test()
        self.tree = {}                  # full parsed tree (copied from parser)
        self.files = {}                 # {file_path: file_info} of all config files
        self.directories = {}
        self.directory_map = {}
        self.index = []
        self.ssl_certificates = {}      # {cert_path: ssl_analysis result}
        self.parser_errors = []
        self.stub_status_urls = []
        self.plus_status_external_urls = []
        self.plus_status_internal_urls = []
        self.parser = NginxConfigParser(filename)
        self.wait_until = 0

    def full_parse(self):
        """
        Parses the whole config tree (including all includes) and collects
        logs, formats and status urls from it.  Parse failures are logged,
        not raised.
        """
        context.log.debug('parsing full tree of %s' % self.filename)

        # parse raw data
        try:
            self.parser.parse()
            self._handle_parse()
        except Exception as e:
            context.log.error('failed to parse config at %s (due to %s)' % (self.filename, e.__class__.__name__))
            context.log.debug('additional info:', exc_info=True)
            self.parser = NginxConfigParser(self.filename)  # re-init parser to discard partial data (if any)

        # Post-handling
        # try to locate and use default logs (PREFIX/logs/*)
        self.add_default_logs()
        # go through log files and apply exclude rules
        # (log files are added during .__collect_data())
        self._exclude_logs()

    def _handle_parse(self):
        # copy raw parse results from the parser onto this object
        self.tree = self.parser.tree
        self.files = self.parser.files
        self.directories = self.parser.directories
        self.directory_map = self.parser.directory_map
        self.index = self.parser.index
        self.parser_errors = self.parser.errors

        # go through and collect all logical data
        self.__collect_data(subtree=self.parser.simplify())

    def collect_structure(self, include_ssl_certs=False):
        """
        Goes through all files (light-parsed includes) and collects their mtime.

        :param include_ssl_certs: bool - include ssl certs or not
        :return: (files, directories) - two dicts as returned by the parser
        """
        files, directories = self.parser.get_structure(include_ssl_certs=include_ssl_certs)
        context.log.debug('found %s files for %s' % (len(files.keys()), self.filename))
        context.log.debug('found %s directories for %s' % (len(directories.keys()), self.filename))
        return files, directories

    def total_size(self):
        """
        Returns the total size of a config tree.

        :return: int size in bytes
        """
        return sum(file_data['size'] for file_data in self.files.itervalues())

    def __collect_data(self, subtree=None, ctx=None):
        """
        Recursively searches needed data (logs, formats, status urls) in a
        simplified config tree.

        :param subtree: dict with tree to parse
        :param ctx: dict with context accumulated on the way down
                    (keys seen here: 'ip_port', 'server_name', 'upstream', 'location')
        """
        ctx = ctx if ctx is not None else {}
        subtree = subtree if subtree is not None else {}

        for key, value in subtree.iteritems():
            if key == 'error_log':
                error_logs = value if isinstance(value, list) else [value]
                for er_log_definition in error_logs:
                    if er_log_definition == 'off':
                        continue

                    split_er_log_definition = er_log_definition.split(' ')
                    log_name = split_er_log_definition[0]

                    # the last word is the log level if recognized,
                    # otherwise fall back to nginx's default level
                    log_level = split_er_log_definition[-1] \
                        if split_er_log_definition[-1] in ERROR_LOG_LEVELS else 'error'  # nginx default log level

                    log_name = re.sub('[\'"]', '', log_name)  # remove all ' and "

                    # if not syslog, assume it is a file...
                    # if it does not start with '/' assume a prefix-relative path
                    if not log_name.startswith('syslog') and not log_name.startswith('/'):
                        log_name = '%s/%s' % (self.prefix, log_name)

                    # first definition of a log wins; later ones don't override the level
                    if log_name not in self.error_logs:
                        self.error_logs[log_name] = log_level
            elif key == 'access_log':
                access_logs = value if isinstance(value, list) else [value]
                for ac_log_definition in access_logs:
                    if ac_log_definition == 'off':
                        continue

                    # py2 filter returns a list; drop empty tokens from double spaces
                    parts = filter(len, ac_log_definition.split(' '))
                    log_format = None if len(parts) == 1 else parts[1]
                    log_name = parts[0]

                    log_name = re.sub('[\'"]', '', log_name)  # remove all ' and "

                    # if not syslog, assume it is a file...
                    # if it does not start with '/' assume a prefix-relative path
                    if not log_name.startswith('syslog') and not log_name.startswith('/'):
                        log_name = '%s/%s' % (self.prefix, log_name)

                    self.access_logs[log_name] = log_format
            elif key == 'log_format':
                for k, v in value.iteritems():
                    self.log_formats[k] = v
            elif key == 'server' and isinstance(value, list) and 'upstream' not in ctx:
                # a server {} inside upstream {} is a backend, not a virtual server - skip those
                for server in value:
                    # remember ctx so per-server keys don't leak to sibling servers
                    current_ctx = copy.copy(ctx)

                    if server.get('listen') is None:
                        # if no listens specified, then use default *:80 and *:8000
                        listen = ['80', '8000']
                    else:
                        listen = server.get('listen')
                    listen = listen if isinstance(listen, list) else [listen]

                    ctx['ip_port'] = []
                    for item in listen:
                        # drop listen parameters like "ssl", "default_server"
                        listen_first_part = item.split(' ')[0]
                        try:
                            addr, port = self.__parse_listen(listen_first_part)
                            # normalize wildcard addresses to loopback so the
                            # built status urls are actually reachable locally
                            if addr in ('*', '0.0.0.0'):
                                addr = '127.0.0.1'
                            elif addr == '[::]':
                                addr = '[::1]'
                            ctx['ip_port'].append((addr, port))
                        except Exception as e:
                            context.log.error('failed to parse bad ipv6 listen directive: %s' % listen_first_part)
                            context.log.debug('additional info:', exc_info=True)

                    if 'server_name' in server:
                        ctx['server_name'] = server.get('server_name')

                    self.__collect_data(subtree=server, ctx=ctx)
                    ctx = current_ctx  # restore pre-server context
            elif key == 'upstream':
                for upstream, upstream_info in value.iteritems():
                    current_ctx = copy.copy(ctx)
                    ctx['upstream'] = upstream
                    self.__collect_data(subtree=upstream_info, ctx=ctx)
                    ctx = current_ctx
            elif key == 'location':
                for location, location_info in value.iteritems():
                    current_ctx = copy.copy(ctx)
                    ctx['location'] = location
                    self.__collect_data(subtree=location_info, ctx=ctx)
                    ctx = current_ctx
            elif key == 'stub_status' and ctx and 'ip_port' in ctx:
                for url in self.__status_url(ctx):
                    if url not in self.stub_status_urls:
                        self.stub_status_urls.append(url)
            elif key == 'status' and ctx and 'ip_port' in ctx:
                # use different url builders for external and internal urls
                for url in self.__status_url(ctx, server_preferred=True):
                    if url not in self.plus_status_external_urls:
                        self.plus_status_external_urls.append(url)

                # for internal (agent) usage local ip address is a better choice,
                # because the external url might not be accessible from a host
                for url in self.__status_url(ctx, server_preferred=False):
                    if url not in self.plus_status_internal_urls:
                        self.plus_status_internal_urls.append(url)
            elif isinstance(value, dict):
                # unknown block directive - recurse into it
                self.__collect_data(subtree=value, ctx=ctx)
            elif isinstance(value, list):
                for next_subtree in value:
                    if isinstance(next_subtree, dict):
                        self.__collect_data(subtree=next_subtree, ctx=ctx)

    @staticmethod
    def __status_url(ctx, server_preferred=False):
        """
        Creates stub/plus status urls based on context.

        :param ctx: {} of current parsing context (reads 'location',
                    'ip_port' and optionally 'server_name')
        :param server_preferred: bool - use server_name instead of listen address
        :return: [] of urls like 'address:port/location'
        """
        results = []
        location = ctx.get('location', '/')

        # remove all modifiers (e.g. '=', '~', '~*') - keep the last token
        location_parts = location.split(' ')
        final_location_part = location_parts[-1]

        # generate a random string that will fit a regex location
        if location.startswith('~'):
            try:
                exact_location = rstr.xeger(final_location_part)

                # check that regex location has / and add it if not
                if not exact_location.startswith('/'):
                    exact_location = '/%s' % exact_location
            except:
                # xeger can fail on unsupported patterns; skip this location
                context.log.debug('bad regex location: %s' % final_location_part)
                exact_location = None
        else:
            exact_location = final_location_part

            # if an exact location doesn't have / that's not a working location, we should not use it
            if not exact_location.startswith('/'):
                context.log.debug('bad exact location: %s' % final_location_part)
                exact_location = None

        if exact_location:
            for ip_port in ctx.get('ip_port'):
                address, port = ip_port

                # prefer the (first) server_name when asked for an external url
                if server_preferred and 'server_name' in ctx:
                    if isinstance(ctx['server_name'], list):
                        address = ctx['server_name'][0].split(' ')[0]
                    else:
                        address = ctx['server_name'].split(' ')[0]

                results.append('%s:%s%s' % (address, port, exact_location))

        return results

    def run_test(self):
        """
        Tests the configuration using nginx -t.
        Saves error lines in self.test_errors if the syntax check was not successful.

        :return: float - run time in seconds
        """
        start_time = time.time()
        context.log.info('running %s -t -c %s' % (self.binary, self.filename))
        if self.binary:
            try:
                # nginx -t reports to stderr; only the stderr stream is inspected
                _, nginx_t_err = subp.call("%s -t -c %s" % (self.binary, self.filename), check=False)
                for line in nginx_t_err:
                    if 'syntax is' in line and 'syntax is ok' not in line:
                        self.test_errors.append(line)
            except Exception as e:
                exception_name = e.__class__.__name__
                context.log.error('failed to %s -t -c %s due to %s' % (self.binary, self.filename, exception_name))
                context.log.debug('additional info:', exc_info=True)
        end_time = time.time()
        return end_time - start_time

    def checksum(self):
        """
        Calculates a total checksum of all config files, certificates,
        permissions and mtimes.

        NOTE(review): files are opened without a context manager, so handles
        are closed only by GC - consider `with open(...)` in a future change.

        :return: str checksum
        """
        checksums = []
        for file_path, file_data in self.files.iteritems():
            checksums.append(hashlib.sha256(open(file_path).read()).hexdigest())
            checksums.append(file_data['permissions'])
            checksums.append(str(file_data['mtime']))
        for dir_data in self.directories.itervalues():
            checksums.append(dir_data['permissions'])
            checksums.append(str(dir_data['mtime']))
        for cert in self.ssl_certificates.iterkeys():
            checksums.append(hashlib.sha256(open(cert).read()).hexdigest())
        return hashlib.sha256('.'.join(checksums)).hexdigest()

    def __parse_listen(self, listen):
        """
        Parses a listen directive value into an (address, port) pair.

        :param listen: str raw listen value, e.g. '*:80', '[::]:8080', '8000'
        :return: (str address, str port) tuple
        """
        if '[' in listen:
            # ipv6
            addr_port_parts = filter(len, listen.rsplit(']', 1))
            address = '%s]' % addr_port_parts[0]

            if len(addr_port_parts) == 1:  # only address specified, add default 80
                return address, '80'
            else:  # get port
                bracket, port = addr_port_parts[1].split(':')
                return address, port
        else:
            # ipv4
            addr_port_parts = filter(len, listen.rsplit(':', 1))

            if len(addr_port_parts) == 1:
                # can be address or port only
                is_port = addr_port_parts[0].isdigit()
                if is_port:  # port!
                    port = addr_port_parts[0]
                    return '*', port
                else:  # it was address only, add default 80
                    address = addr_port_parts[0]
                    return address, '80'
            else:
                address, port = addr_port_parts
                return address, port

    def add_default_logs(self):
        """
        By default nginx uses logs placed in the --prefix/logs/ directory.
        This method tries to find them on disk and add them to the stores.
        """
        access_log_path = '%s/logs/access.log' % self.prefix
        if os.path.isfile(access_log_path) and access_log_path not in self.access_logs:
            self.access_logs[access_log_path] = None  # None - default log format

        error_log_path = '%s/logs/error.log' % self.prefix
        if os.path.isfile(error_log_path) and error_log_path not in self.error_logs:
            self.error_logs[error_log_path] = 'error'

    def run_ssl_analysis(self):
        """
        Iterate over a list of ssl_certificate definitions and run ssl_analysis
        to construct a dictionary with each ssl_certificate value paired with
        the result of ssl_analysis.  Certificates already analyzed are skipped.

        :return: float run time in seconds, or None if there are no certificates
        """
        if not self.parser.ssl_certificates:
            return

        start_time = time.time()
        for cert_filename in set(self.parser.ssl_certificates):
            if cert_filename not in self.ssl_certificates:
                ssl_analysis_result = ssl_analysis(cert_filename)
                if ssl_analysis_result:
                    self.ssl_certificates[cert_filename] = ssl_analysis_result
        end_time = time.time()
        return end_time - start_time

    def _exclude_logs(self):
        """
        Iterate through log file stores and remove ones that match exclude rules
        from the 'nginx.exclude_logs' app config setting.
        """
        # take the comma-separated string of pathname patterns and separate
        # them into individual patterns
        exclude_rules = context.app_config.get('nginx', {}).get('exclude_logs', '').split(',')

        for rule in [x for x in exclude_rules if x]:  # skip potentially empty rules due to improper formatting
            # access logs
            for excluded_file in glib(self.access_logs.keys(), rule):
                del self.access_logs[excluded_file]

            # error logs
            for excluded_file in glib(self.error_logs.keys(), rule):
                del self.error_logs[excluded_file]