def setUpBuilder(self): raise unittest.SkipTest("no builder")
def setUp(cls):
    if not STATIC_SUPPORTED:
        raise unittest.SkipTest("only runs against the cql3 protocol v2.0")
    super(TestStaticColumn, cls).setUp()
def setUpClass(cls):
    raise unittest2.SkipTest('foo')
def validate_ccm_viable():
    try:
        common.normalize_interface(('::1', 0))
    except:
        raise unittest.SkipTest('this version of ccm does not support ipv6')
def test_unset_values(self):
    """
    Test to validate that UNSET_VALUEs are bound, and have the expected effect

    Prepare a statement and insert all values. Then follow with execute excluding
    parameters. Verify that the original values are unaffected.

    @since 2.6.0
    @jira_ticket PYTHON-317
    @expected_result UNSET_VALUE is implicitly added to bind parameters, and properly
    encoded, leaving unset values unaffected.

    @test_category prepared_statements:binding
    """
    if PROTOCOL_VERSION < 4:
        raise unittest.SkipTest("Binding UNSET values is not supported in protocol version < 4")

    # table with at least two values so one can be used as a marker
    self.session.execute(
        "CREATE TABLE IF NOT EXISTS test1rf.test_unset_values (k int PRIMARY KEY, v0 int, v1 int)")
    insert = self.session.prepare(
        "INSERT INTO test1rf.test_unset_values (k, v0, v1) VALUES (?, ?, ?)")
    select = self.session.prepare(
        "SELECT * FROM test1rf.test_unset_values WHERE k=?")

    bind_expected = [
        # initial condition
        ((0, 0, 0), (0, 0, 0)),
        # unset implicit
        ((0, 1,), (0, 1, 0)),
        ({'k': 0, 'v0': 2}, (0, 2, 0)),
        ({'k': 0, 'v1': 1}, (0, 2, 1)),
        # unset explicit
        ((0, 3, UNSET_VALUE), (0, 3, 1)),
        ((0, UNSET_VALUE, 2), (0, 3, 2)),
        ({'k': 0, 'v0': 4, 'v1': UNSET_VALUE}, (0, 4, 2)),
        ({'k': 0, 'v0': UNSET_VALUE, 'v1': 3}, (0, 4, 3)),
        # nulls still work
        ((0, None, None), (0, None, None)),
    ]

    for params, expected in bind_expected:
        self.session.execute(insert, params)
        results = self.session.execute(select, (0,))
        self.assertEqual(results[0], expected)

    self.assertRaises(ValueError, self.session.execute, select, (UNSET_VALUE, 0, 0))
def setUp(self):
    if CASS_SERVER_VERSION < (3, 0):
        raise unittest.SkipTest("Materialized views require Cassandra 3.0+")
def _test_vertex_multiple_properties(self, schema, graphson):
    """
    Test verifying vertex property form for various Cardinality

    All key types are encoded as a list, regardless of cardinality
    Single cardinality properties have only one value -- the last one added
    Default is single (this is config dependent)

    @since 1.0.0
    @jira_ticket PYTHON-487

    @test_category dse graph
    """
    if schema is not ClassicGraphSchema:
        raise unittest.SkipTest('skipped because multiple properties are only supported with classic graphs')

    self.execute_graph('''Schema schema = graph.schema();
                       schema.propertyKey('mult_key').Text().multiple().ifNotExists().create();
                       schema.propertyKey('single_key').Text().single().ifNotExists().create();
                       schema.vertexLabel('MPW1').properties('mult_key').ifNotExists().create();
                       schema.vertexLabel('SW1').properties('single_key').ifNotExists().create();''', graphson)

    v = self.execute_graph('''v = graph.addVertex('MPW1')
                           v.property('mult_key', 'value')
                           v''', graphson)[0]
    self.assertEqual(len(v.properties), 1)
    self.assertEqual(len(v.properties['mult_key']), 1)
    self.assertEqual(v.properties['mult_key'][0].label, 'mult_key')
    self.assertEqual(v.properties['mult_key'][0].value, 'value')

    # multiple_with_two_values
    v = self.execute_graph('''g.addV('MPW1').property('mult_key', 'value0').property('mult_key', 'value1')''', graphson)[0]
    self.assertEqual(len(v.properties), 1)
    self.assertEqual(len(v.properties['mult_key']), 2)
    self.assertEqual(v.properties['mult_key'][0].label, 'mult_key')
    self.assertEqual(v.properties['mult_key'][1].label, 'mult_key')
    self.assertEqual(v.properties['mult_key'][0].value, 'value0')
    self.assertEqual(v.properties['mult_key'][1].value, 'value1')

    # single_with_one_value
    v = self.execute_graph('''v = graph.addVertex('SW1')
                           v.property('single_key', 'value')
                           v''', graphson)[0]
    self.assertEqual(len(v.properties), 1)
    self.assertEqual(len(v.properties['single_key']), 1)
    self.assertEqual(v.properties['single_key'][0].label, 'single_key')
    self.assertEqual(v.properties['single_key'][0].value, 'value')

    if DSE_VERSION < Version('6.8'):
        # single_with_two_values
        with self.assertRaises(InvalidRequest):
            v = self.execute_graph('''v = graph.addVertex('SW1')
                                   v.property('single_key', 'value0').property('single_key', 'value1').next()
                                   v''', graphson)[0]
    else:
        # >=6.8 single_with_two_values, first one wins
        v = self.execute_graph('''v = graph.addVertex('SW1')
                               v.property('single_key', 'value0').property('single_key', 'value1')
                               v''', graphson)[0]
        self.assertEqual(v.properties['single_key'][0].value, 'value0')
def test_102_email_admin_override_custom_report_mobile(self):
    """
    1. Use reportuser
    2. Reportuser overrides admin user address.
    3. Custom report with test not in default.
    """
    if (not can_relay):
        raise unittest2.SkipTest('Unable to relay through ' + global_functions.testServerHost)
    if remote_control.quickTestsOnly:
        raise unittest2.SkipTest('Skipping a time consuming test')

    # Create settings to receive test_email_address
    configure_mail_relay()

    # add administrator
    adminsettings = uvmContext.adminManager().getSettings()
    orig_adminsettings = copy.deepcopy(adminsettings)
    adminsettings['users']['list'].append(create_admin_user(useremail=test_email_address))
    uvmContext.adminManager().setSettings(adminsettings)

    settings = app.getSettings()
    # add custom template with a test not in daily reports
    settings["emailTemplates"]["list"] = settings["emailTemplates"]["list"][:1]
    settings["emailTemplates"]["list"].append(create_email_template(mobile=True))
    # add report user with test_email_address
    settings["reportsUsers"]["list"] = settings["reportsUsers"]["list"][:1]
    settings["reportsUsers"]["list"].append(create_reports_user(profile_email=test_email_address, email_template_id=2))
    app.setSettings(settings)

    # send email
    subprocess.call([global_functions.get_prefix()+"/usr/share/untangle/bin/reports-generate-fixed-reports.py"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # look for email
    email_found = fetch_email("/tmp/test_102_email_admin_override_custom_report_mobile_file", test_email_address)
    if email_found:
        email_context_found1 = remote_control.run_command("grep -i 'Custom Report' /tmp/test_102_email_admin_override_custom_report_mobile_file 2>&1", stdout=True)
        email_context_found2 = remote_control.run_command("grep -i 'Administration-VWuRol5uWw' /tmp/test_102_email_admin_override_custom_report_mobile_file 2>&1", stdout=True)

    # restore
    uvmContext.adminManager().setSettings(orig_adminsettings)

    assert(email_found)
    assert((email_context_found1) and (email_context_found2))

    # Verify that all images are no larger than 350x350.
    # copy mail from remote client
    subprocess.call("scp -q -i %s testshell@%s:/tmp/test_102_email_admin_override_custom_report_mobile_file /tmp/" % (remote_control.hostKeyFile, remote_control.clientIP), shell=True)
    fp = open("/tmp/test_102_email_admin_override_custom_report_mobile_file")
    email_string = fp.read()
    fp.close()
    subprocess.call("rm /tmp/test_102_email_admin_override_custom_report_mobile_file", shell=True)
    # Delete the first line as it is blank and throws off the parser
    email_string = '\n'.join(email_string.split('\n')[1:])

    msg = email.message_from_string(email_string)
    mime_content_ids = []
    for part in msg.walk():
        if part.get_content_maintype() == "image":
            # print("Image found")
            for index, key in enumerate(part.keys()):
                if key == "Content-ID" and "untangle.int" in part.values()[index]:
                    email_image = part.get_payload(decode=True)
                    im = Image.open(StringIO(email_image))
                    (image_width, image_height) = im.size
                    print("Image %s width: %d height: %d" % (part.values()[index], image_width, image_height))
                    assert(image_width <= 350 and image_height <= 350)
def test_103_email_report_verify_apps(self):
    """
    1) Install all apps
    2) Generate a report
    3) Verify that the emailed report contains a section for each app
    """
    global app, apps_list, apps_name_list
    if (not can_relay):
        raise unittest2.SkipTest('Unable to relay through ' + global_functions.testServerHost)
    if remote_control.quickTestsOnly:
        raise unittest2.SkipTest('Skipping a time consuming test')

    # create settings to receive test_email_address
    configure_mail_relay()

    # add administrator
    adminsettings = uvmContext.adminManager().getSettings()
    orig_adminsettings = copy.deepcopy(adminsettings)
    adminsettings['users']['list'].append(create_admin_user(useremail=test_email_address))
    uvmContext.adminManager().setSettings(adminsettings)

    # clear all report users
    settings = app.getSettings()
    settings["reportsUsers"]["list"] = settings["reportsUsers"]["list"][:1]
    app.setSettings(settings)

    # install all the apps that aren't already installed
    system_stats = uvmContext.metricManager().getStats()
    # print system_stats
    system_memory = system_stats['systemStats']['MemTotal']
    if (int(system_memory) < 2200000000):
        # don't use high memory apps in devices with 2G or less.
        apps_list = apps_list_short
        apps_name_list = apps_name_list_short
    apps = []
    for name in apps_list:
        if (uvmContext.appManager().isInstantiated(name)):
            print("App %s already installed" % name)
        else:
            apps.append(uvmContext.appManager().instantiate(name, default_policy_id))

    # create some traffic
    result = remote_control.is_online(tries=1)

    # flush out events
    app.flushEvents()

    # send emails
    subprocess.call([global_functions.get_prefix()+"/usr/share/untangle/bin/reports-generate-fixed-reports.py"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # look for email
    email_found = fetch_email("/tmp/test_103_email_report_admin_file", test_email_address)

    # look for all the appropriate sections in the report email
    results = []
    if email_found:
        for str in apps_name_list:
            results.append(remote_control.run_command("grep -q -i '%s' /tmp/test_103_email_report_admin_file 2>&1" % str))

    # restore
    uvmContext.adminManager().setSettings(orig_adminsettings)

    # remove apps that were installed above
    for a in apps:
        uvmContext.appManager().destroy(a.getAppSettings()["id"])

    assert(email_found)
    for result in results:
        assert(result == 0)
def test_040_remote_syslog(self):
    if (not can_syslog):
        raise unittest2.SkipTest('Unable to syslog through ' + syslog_server_host)

    firewall_app = None
    if (uvmContext.appManager().isInstantiated("firewall")):
        print("App %s already installed" % "firewall")
        firewall_app = uvmContext.appManager().app("firewall")
    else:
        firewall_app = uvmContext.appManager().instantiate("firewall", default_policy_id)

    # Install firewall rule to generate syslog events
    rules = firewall_app.getRules()
    rules["list"].append(create_firewall_rule("SRC_ADDR", remote_control.clientIP))
    firewall_app.setRules(rules)
    rules = firewall_app.getRules()
    # Get rule ID
    for rule in rules['list']:
        if rule['enabled'] and rule['block']:
            targetRuleId = rule['ruleId']
            break

    # Setup syslog to send events to syslog host in /config/events/syslog
    syslogSettings = uvmContext.eventManager().getSettings()
    syslogSettings["syslogEnabled"] = True
    syslogSettings["syslogPort"] = 514
    syslogSettings["syslogProtocol"] = "UDP"
    syslogSettings["syslogHost"] = syslog_server_host
    uvmContext.eventManager().setSettings(syslogSettings)

    # create some traffic (blocked by firewall and thus create a syslog event)
    exactly_now = datetime.now()
    exactly_now_minus1 = datetime.now() - timedelta(minutes=1)
    exactly_now_plus1 = datetime.now() + timedelta(minutes=1)
    timestamp = exactly_now.strftime('%Y-%m-%d %H:%M')
    timestamp_minus1 = exactly_now_minus1.strftime('%Y-%m-%d %H:%M')
    timestamp_now_plus1 = exactly_now_plus1.strftime('%Y-%m-%d %H:%M')
    result = remote_control.is_online(tries=1)

    # flush out events
    app.flushEvents()

    # remove the firewall rule and set syslog back to original settings
    app.setSettings(orig_settings)
    rules["list"] = []
    firewall_app.setRules(rules)

    # remove firewall
    if firewall_app != None:
        uvmContext.appManager().destroy(firewall_app.getAppSettings()["id"])
        firewall_app = None

    # parse the output and look for a rule that matches the expected values
    tries = 5
    found_count = 0
    timestamp_variations = [str('\"timeStamp\":\"%s' % timestamp_minus1), str('\"timeStamp\":\"%s' % timestamp_now_plus1)]
    strings_to_find = ['\"blocked\":true', str('\"ruleId\":%i' % targetRuleId), str('\"timeStamp\":\"%s' % timestamp)]
    num_string_find = len(strings_to_find)
    while (tries > 0 and found_count < num_string_find):
        # get syslog results on server
        rsyslogResult = remote_control.run_command("sudo tail -n 200 /var/log/syslog | grep 'FirewallEvent'", host=syslog_server_host, stdout=True)
        tries -= 1
        for line in rsyslogResult.splitlines():
            print("\nchecking line: %s " % line)
            found_count = 0
            for string in strings_to_find:
                if not string in line:
                    print("missing: %s" % string)
                    if ('timeStamp' in string):
                        # Allow +/- one minute in timestamp
                        if (timestamp_variations[0] in line) or (timestamp_variations[1] in line):
                            print("found: time with variation %s or %s" % (timestamp_variations[0], timestamp_variations[1]))
                            found_count += 1
                        else:
                            break
                    else:
                        # continue
                        break
                else:
                    found_count += 1
                    print("found: %s" % string)
            # break if all the strings have been found.
            if found_count == num_string_find:
                break
        time.sleep(2)

    # Disable syslog
    syslogSettings = uvmContext.eventManager().getSettings()
    syslogSettings["syslogEnabled"] = False
    uvmContext.eventManager().setSettings(syslogSettings)

    assert(found_count == num_string_find)
def test_100_email_report_admin(self):
    """
    The "default" configuration test:
    - Administrator email account gets the report email
    """
    if (not can_relay):
        raise unittest2.SkipTest('Unable to relay through ' + global_functions.testServerHost)
    if remote_control.quickTestsOnly:
        raise unittest2.SkipTest('Skipping a time consuming test')

    # create settings to receive test_email_address
    configure_mail_relay()

    # add administrator
    adminsettings = uvmContext.adminManager().getSettings()
    orig_adminsettings = copy.deepcopy(adminsettings)
    adminsettings['users']['list'].append(create_admin_user(useremail=test_email_address))
    uvmContext.adminManager().setSettings(adminsettings)

    # clear all report users
    settings = app.getSettings()
    settings["reportsUsers"]["list"] = settings["reportsUsers"]["list"][:1]
    app.setSettings(settings)

    # send emails
    subprocess.call([global_functions.get_prefix()+"/usr/share/untangle/bin/reports-generate-fixed-reports.py"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # look for email
    email_found = fetch_email("/tmp/test_100_email_report_admin_file", test_email_address)
    email_context_found1 = ""
    email_context_found2 = ""
    if email_found:
        email_context_found1 = remote_control.run_command("grep -i -e 'Reports:.*Daily.*' /tmp/test_100_email_report_admin_file 2>&1", stdout=True)
        email_context_found2 = remote_control.run_command("grep -i -e 'Content-Type: image/png; name=' /tmp/test_100_email_report_admin_file 2>&1", stdout=True)

    # restore
    uvmContext.adminManager().setSettings(orig_adminsettings)

    assert(email_found)
    assert((email_context_found1) and (email_context_found2))

    ## Verify that all images are intact.
    # copy mail from remote client
    subprocess.call("scp -q -i %s testshell@%s:/tmp/test_100_email_report_admin_file /tmp/" % (remote_control.hostKeyFile, remote_control.clientIP), shell=True)
    fp = open("/tmp/test_100_email_report_admin_file")
    email_string = fp.read()
    fp.close()
    subprocess.call("rm /tmp/test_100_email_report_admin_file", shell=True)
    # Delete the first line as it is blank and throws off the parser
    email_string = '\n'.join(email_string.split('\n')[1:])

    msg = email.message_from_string(email_string)
    mime_content_ids = []
    parser = ContentIdParser()
    for part in msg.walk():
        if part.get_content_maintype() == "image":
            for index, key in enumerate(part.keys()):
                if key == "Content-ID":
                    mime_content_ids.append(part.values()[index])
        elif part.get_content_maintype() == "text":
            parser.feed(part.get_payload(decode=True))

    assert(len(parser.content_ids) == len(mime_content_ids))
def setUpClass(cls): """Provide a server config and an iterable of resources to delete.""" cls.cfg = config.get_config() cls.resources = set() if cls.cfg.version < Version('2.8'): raise unittest2.SkipTest('These tests require at least Pulp 2.8.')
def test_can_insert_tuples_all_collection_datatypes(self):
    """
    Ensure tuple subtypes are appropriately handled for maps, sets, and lists.
    """
    if self.cass_version < (2, 1, 0):
        raise unittest.SkipTest("The tuple type was introduced in Cassandra 2.1")

    c = Cluster(protocol_version=PROTOCOL_VERSION)
    s = c.connect(self.keyspace_name)

    # set the row_factory to dict_factory for programmatic access
    # set the encoder for tuples for the ability to write tuples
    s.row_factory = dict_factory
    s.encoder.mapping[tuple] = s.encoder.cql_encode_tuple

    values = []

    # create list values
    for datatype in PRIMITIVE_DATATYPES:
        values.append('v_{0} frozen<tuple<list<{1}>>>'.format(len(values), datatype))

    # create set values
    for datatype in PRIMITIVE_DATATYPES:
        values.append('v_{0} frozen<tuple<set<{1}>>>'.format(len(values), datatype))

    # create map values
    for datatype in PRIMITIVE_DATATYPES:
        datatype_1 = datatype_2 = datatype
        if datatype == 'blob':
            # unhashable type: 'bytearray'
            datatype_1 = 'ascii'
        values.append('v_{0} frozen<tuple<map<{1}, {2}>>>'.format(len(values), datatype_1, datatype_2))

    # make sure we're testing all non primitive data types in the future
    if set(COLLECTION_TYPES) != set(['tuple', 'list', 'map', 'set']):
        raise NotImplementedError('Missing datatype not implemented: {}'.format(
            set(COLLECTION_TYPES) - set(['tuple', 'list', 'map', 'set'])))

    # create table
    s.execute("CREATE TABLE tuple_non_primative ("
              "k int PRIMARY KEY, "
              "%s)" % ', '.join(values))

    i = 0
    # test tuple<list<datatype>>
    for datatype in PRIMITIVE_DATATYPES:
        created_tuple = tuple([[get_sample(datatype)]])
        s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple))

        result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0]
        self.assertEqual(created_tuple, result['v_%s' % i])
        i += 1

    # test tuple<set<datatype>>
    for datatype in PRIMITIVE_DATATYPES:
        created_tuple = tuple([sortedset([get_sample(datatype)])])
        s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple))

        result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0]
        self.assertEqual(created_tuple, result['v_%s' % i])
        i += 1

    # test tuple<map<datatype, datatype>>
    for datatype in PRIMITIVE_DATATYPES:
        if datatype == 'blob':
            # unhashable type: 'bytearray'
            created_tuple = tuple([{get_sample('ascii'): get_sample(datatype)}])
        else:
            created_tuple = tuple([{get_sample(datatype): get_sample(datatype)}])

        s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple))

        result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0]
        self.assertEqual(created_tuple, result['v_%s' % i])
        i += 1

    c.shutdown()
limitations under the License.
"""
try:
    import unittest2 as unittest
except ImportError:
    import unittest

try:
    # Try to import modules
    from multiprocessing import Process, Queue

    # IronPython fails when creating a queue
    Queue()
except ImportError:
    # Some interpreters don't have support for multiprocessing
    raise unittest.SkipTest("Interpreter doesn't support multiprocessing")

try:
    # Trick to use coverage in sub-processes, from:
    # http://blog.schettino72.net/posts/python-code-coverage-multiprocessing.html
    import coverage

    class WrappedProcess(Process):
        def _bootstrap(self):
            cov = coverage.Coverage(data_suffix=True)
            cov.start()
            try:
                return Process._bootstrap(self)
            finally:
                cov.stop()
                cov.save()
def setUp(self):
    if is_monkey_patched():
        raise unittest.SkipTest("Can't test asyncore with monkey patching")
    ConnectionTests.setUp(self)
def setUp(self):
    if LibevConnection is None:
        raise unittest.SkipTest('libev does not appear to be installed correctly')
def test_ParseMsimResult(self, path):
    # type: (...) -> Any
    if not isinstance(self.builder, MSim):
        raise unittest2.SkipTest("ModelSim only test")

    self.assertEqual(
        list(
            self.builder._makeRecords(
                '** Error: %s(21): near "EOF": (vcom-1576) '
                "expecting ';'." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="near \"EOF\": expecting ';'.",
                filename=Path(path),
                line_number=20,
                error_code="vcom-1576",
                severity=DiagType.ERROR,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Warning: %s(23): (vcom-1320) Type of expression "
                "\"(OTHERS => '0')\" is ambiguous; using element type "
                "STD_LOGIC_VECTOR, not aggregate type register_type." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Type of expression \"(OTHERS => '0')\" is "
                "ambiguous; using element type STD_LOGIC_VECTOR, not "
                "aggregate type register_type.",
                filename=Path(path),
                line_number=22,
                error_code="vcom-1320",
                severity=DiagType.WARNING,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Warning: %s(39): (vcom-1514) Range choice direction "
                "(downto) does not determine aggregate index range "
                "direction (to)." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Range choice direction (downto) does not determine "
                "aggregate index range direction (to).",
                filename=Path(path),
                line_number=38,
                error_code="vcom-1514",
                severity=DiagType.WARNING,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Error: (vcom-11) Could not find work.regfile_pkg.")),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Could not find work.regfile_pkg.",
                error_code="vcom-11",
                severity=DiagType.ERROR,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Error (suppressible): %s(7): (vcom-1195) Cannot find "
                'expanded name "work.regfile_pkg".' % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text='Cannot find expanded name "work.regfile_pkg".',
                filename=Path(path),
                line_number=6,
                error_code="vcom-1195",
                severity=DiagType.ERROR,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Error: %s(7): Unknown expanded name." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Unknown expanded name.",
                line_number="6",
                filename=Path(path),
                severity=DiagType.ERROR,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Warning: [14] %s(103): (vcom-1272) Length of expected "
                "is 4; length of actual is 8." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Length of expected is 4; length of actual is 8.",
                line_number="102",
                error_code="vcom-1272",
                filename=Path(path),
                severity=DiagType.WARNING,
            )
        ],
    )

    self.assertEqual(
        list(
            self.builder._makeRecords(
                "** Warning: [14] %s(31): (vcom-1246) Range -1 downto 0 "
                "is null." % path)),
        [
            BuilderDiag(
                builder_name=self.builder_name,
                text="Range -1 downto 0 is null.",
                line_number="30",
                error_code="vcom-1246",
                filename=Path(path),
                severity=DiagType.WARNING,
            )
        ],
    )
def test_120_natOneToOneWanDown(self):
    # create a 1:1 NAT and then down the wan which the NAT is set to
    # if there are more than one WAN
    if remote_control.quickTestsOnly:
        raise unittest2.SkipTest('Skipping a time consuming test')
    if (len(indexOfWans) < 2):
        raise unittest2.SkipTest("Need at least two WANS for combination of wan-balancer and wan failover tests")

    pre_count = global_functions.get_app_metric_value(appWanFailover, "changed")

    # raise unittest2.SkipTest('Skipping test_120_natOneToOneWanDown as not possible with current network layout ')
    netsettings = uvmContext.networkManager().getNetworkSettings()
    nukeWanBalancerRules()
    nukeFailoverRules()
    # create valid failover tests
    for wanIndexTup in indexOfWans:
        wanIndex = wanIndexTup[0]
        buildWanTestRule(wanIndex)

    for wanIndexTup in indexOfWans:
        # get the WAN IP address which was source routed
        wanIndex = wanIndexTup[0]
        wanIP = wanIndexTup[1]
        wanExternalIP = wanIndexTup[2]

        # Add networking route which does not handle re-routing if WAN is down
        netsettings['natRules']['list'] = []
        netsettings['natRules']['list'].append(buildNatRule("DST_ADDR", ip_address_testdestination, wanIP))
        uvmContext.networkManager().setNetworkSettings(netsettings)

        # Test that only the routed interface is used 5 times
        subprocess.check_output("ip route flush cache", shell=True)
        for x in range(0, 5):
            result = global_functions.get_public_ip_address()
            print "NAT 1:1 IP %s External IP %s and retrieved IP %s" % (wanIP, wanExternalIP, result)
            assert (result == wanExternalIP)

        # now down the selected wan and see if traffic flows out the other wan
        buildWanTestRule(wanIndex, "ping", "192.168.244.1")
        # Wait for the targeted WAN to be off line before testing that the WAN is off line.
        timeout = 50000
        online = True
        offlineWanIndex = wanIndex
        while online and timeout > 0:
            timeout -= 1
            wanStatus = appWanFailover.getWanStatus()
            for statusInterface in wanStatus['list']:
                if statusInterface['interfaceId'] == offlineWanIndex:
                    online = statusInterface['online']
        time.sleep(10)  # Let WAN balancer see that the interface is down
        subprocess.check_output("ip route flush cache", shell=True)
        for x in range(0, 5):
            result = global_functions.get_public_ip_address()
            print "WAN Down NAT 1:1 IP %s External IP %s and retrieved IP %s" % (wanIP, wanExternalIP, result)
            assert (result == wanExternalIP)

    uvmContext.networkManager().setNetworkSettings(orig_netsettings)
    nukeFailoverRules()
    # Check to see if the faceplate counters have incremented.
    post_count = global_functions.get_app_metric_value(appWanFailover, "changed")
    assert (pre_count < post_count)
def wrapper(*args, **kwargs):
    if not cond:
        return func(*args, **kwargs)
    r = reason if reason else 'Skipping due expected condition is true'
    LOGGER.info(r)
    raise unittest2.SkipTest(r)
def _test_vertex_multiple_properties(self, schema, graphson):
    """
    Test verifying vertex property form for various Cardinality

    All key types are encoded as a list, regardless of cardinality
    Single cardinality properties have only one value -- the last one added
    Default is single (this is config dependent)

    @since 1.0.0
    @jira_ticket PYTHON-641

    @test_category dse graph
    """
    if schema is not ClassicGraphSchema:
        raise unittest.SkipTest('skipped because multiple properties are only supported with classic graphs')

    s = self.session
    s.execute_graph('''Schema schema = graph.schema();
                    schema.propertyKey('mult_key').Text().multiple().ifNotExists().create();
                    schema.propertyKey('single_key').Text().single().ifNotExists().create();
                    schema.vertexLabel('MPW1').properties('mult_key').ifNotExists().create();
                    schema.vertexLabel('MPW2').properties('mult_key').ifNotExists().create();
                    schema.vertexLabel('SW1').properties('single_key').ifNotExists().create();''')

    mpw1v = s.execute_graph('''v = graph.addVertex('MPW1')
                            v.property('mult_key', 'value')
                            v''')[0]

    mpw2v = s.execute_graph('''g.addV('MPW2').property('mult_key', 'value0').property('mult_key', 'value1')''')[0]

    g = self.fetch_traversal_source(graphson)
    traversal = g.V(mpw1v.id).properties()
    vertex_props = self.execute_traversal(traversal, graphson)
    self.assertEqual(len(vertex_props), 1)
    self.assertEqual(self.fetch_key_from_prop(vertex_props[0]), "mult_key")
    self.assertEqual(vertex_props[0].value, "value")

    # multiple_with_two_values
    # v = s.execute_graph('''g.addV(label, 'MPW2', 'mult_key', 'value0', 'mult_key', 'value1')''')[0]
    traversal = g.V(mpw2v.id).properties()
    vertex_props = self.execute_traversal(traversal, graphson)
    self.assertEqual(len(vertex_props), 2)
    self.assertEqual(self.fetch_key_from_prop(vertex_props[0]), 'mult_key')
    self.assertEqual(self.fetch_key_from_prop(vertex_props[1]), 'mult_key')
    self.assertEqual(vertex_props[0].value, 'value0')
    self.assertEqual(vertex_props[1].value, 'value1')

    # single_with_one_value
    v = s.execute_graph('''v = graph.addVertex('SW1')
                        v.property('single_key', 'value')
                        v''')[0]
    traversal = g.V(v.id).properties()
    vertex_props = self.execute_traversal(traversal, graphson)
    self.assertEqual(len(vertex_props), 1)
    self.assertEqual(self.fetch_key_from_prop(vertex_props[0]), "single_key")
    self.assertEqual(vertex_props[0].value, "value")
def setUp(self):
    self.src_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), '496908818s.nwb')
    self.filename = 'test_496908818s.nwb'
    raise unittest.SkipTest('Backwards compatibility not currently supported')
def __test_udt(self, schema, graphson, address_class, address_with_tags_class,
               complex_address_class, complex_address_with_owners_class):
    if schema is not CoreGraphSchema or DSE_VERSION < Version('6.8'):
        raise unittest.SkipTest("Graph UDT is only supported with DSE 6.8+ and Core graphs.")

    ep = self.get_execution_profile(graphson)

    Address = address_class
    AddressWithTags = address_with_tags_class
    ComplexAddress = complex_address_class
    ComplexAddressWithOwners = complex_address_with_owners_class

    # setup udt
    self.session.execute_graph("""
        schema.type('address').property('address', Text).property('city', Text).property('state', Text).create();
        schema.type('addressTags').property('address', Text).property('city', Text).property('state', Text).
            property('tags', setOf(Text)).create();
        schema.type('complexAddress').property('address', Text).property('address_tags', frozen(typeOf('addressTags'))).
            property('city', Text).property('state', Text).property('props', mapOf(Text, Int)).create();
        schema.type('complexAddressWithOwners').property('address', Text).
            property('address_tags', frozen(typeOf('addressTags'))).
            property('city', Text).property('state', Text).property('props', mapOf(Text, Int)).
            property('owners', frozen(listOf(tupleOf(Text, Int)))).create();
    """, execution_profile=ep)

    # wait max 10 seconds to get the UDT discovered.
    wait_until_not_raised(
        lambda: self.session.cluster.register_user_type(self.graph_name, 'address', Address),
        1, 10)
    wait_until_not_raised(
        lambda: self.session.cluster.register_user_type(self.graph_name, 'addressTags', AddressWithTags),
        1, 10)
    wait_until_not_raised(
        lambda: self.session.cluster.register_user_type(self.graph_name, 'complexAddress', ComplexAddress),
        1, 10)
    wait_until_not_raised(
        lambda: self.session.cluster.register_user_type(self.graph_name, 'complexAddressWithOwners', ComplexAddressWithOwners),
        1, 10)

    data = {
        "udt1": ["typeOf('address')", Address('1440 Rd Smith', 'Quebec', 'QC')],
        "udt2": ["tupleOf(typeOf('address'), Text)",
                 (Address('1440 Rd Smith', 'Quebec', 'QC'), 'hello')],
        "udt3": ["tupleOf(frozen(typeOf('address')), Text)",
                 (Address('1440 Rd Smith', 'Quebec', 'QC'), 'hello')],
        "udt4": ["tupleOf(tupleOf(Int, typeOf('address')), Text)",
                 ((42, Address('1440 Rd Smith', 'Quebec', 'QC')), 'hello')],
        "udt5": ["tupleOf(tupleOf(Int, typeOf('addressTags')), Text)",
                 ((42, AddressWithTags('1440 Rd Smith', 'Quebec', 'QC', {'t1', 't2'})), 'hello')],
        "udt6": ["tupleOf(tupleOf(Int, typeOf('complexAddress')), Text)",
                 ((42, ComplexAddress('1440 Rd Smith',
                                      AddressWithTags('1440 Rd Smith', 'Quebec', 'QC', {'t1', 't2'}),
                                      'Quebec', 'QC', {'p1': 42, 'p2': 33})), 'hello')],
        "udt7": ["tupleOf(tupleOf(Int, frozen(typeOf('complexAddressWithOwners'))), Text)",
                 ((42, ComplexAddressWithOwners(
                     '1440 Rd Smith',
                     AddressWithTags('1440 CRd Smith', 'Quebec', 'QC', {'t1', 't2'}),
                     'Quebec', 'QC', {'p1': 42, 'p2': 33}, [('Mike', 43), ('Gina', 39)]
                 )), 'hello')]
    }

    g = self.fetch_traversal_source(graphson)
    for typ, value in six.itervalues(data):
        vertex_label = VertexLabel([typ])
        property_name = next(six.iterkeys(vertex_label.non_pk_properties))
        schema.create_vertex_label(self.session, vertex_label, execution_profile=ep)

        write_traversal = g.addV(str(vertex_label.label)).property('pkid', vertex_label.id). \
            property(property_name, value)
        self.execute_traversal(write_traversal, graphson)

        # vertex = list(schema.add_vertex(self.session, vertex_label, property_name, value, execution_profile=ep))[0]
        # vertex_properties = list(schema.get_vertex_properties(
        #     self.session, vertex, execution_profile=ep))

        read_traversal = g.V().hasLabel(str(vertex_label.label)).has(property_name).properties()
        vertex_properties = self.execute_traversal(read_traversal, graphson)

        self.assertEqual(len(vertex_properties), 2)  # include pkid
        for vp in vertex_properties:
            if vp.label == 'pkid':
                continue

            self.assertIsInstance(vp, (VertexProperty, TravVertexProperty))
            self.assertEqual(vp.label, property_name)
            self.assertEqual(vp.value, value)
def setUp(self):
    if PROTOCOL_VERSION < 4:
        raise unittest.SkipTest(
            "Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
def setUp(self):
    uri = queries.uri('localhost', 5432, 'postgres', 'postgres')
    try:
        self.session = queries.Session(uri, pool_max_size=10)
    except queries.OperationalError as error:
        raise unittest.SkipTest(str(error).split('\n')[0])
def setUp(self):
    if twistedreactor is None:
        raise unittest.SkipTest("Twisted libraries not available")
    twistedreactor.TwistedConnection.initialize_reactor()
    super(TestTwistedTimer, self).setUp()
def test_murmur3_c(self):
    try:
        from cassandra.cmurmur3 import murmur3
        self._verify_hash(murmur3)
    except ImportError:
        raise unittest.SkipTest('The cmurmur3 extension is not available')
class VirusBlockerBaseTests(unittest2.TestCase):

    @staticmethod
    def appName():
        return "untangle-base-virus-blocker"

    @staticmethod
    def shortName():
        return "untangle"

    @staticmethod
    def displayName():
        return "Virus Blocker Lite"

    @staticmethod
    def appNameSSLInspector():
        return "ssl-inspector"

    @staticmethod
    def initialSetUp(self):
        global app, md5StdNum, appSSL, appSSLData, canRelay

        # download eicar and trojan files before installing virus blocker
        self.ftp_user_name, self.ftp_password = global_functions.get_live_account_info("ftp")
        remote_control.run_command("rm -f /tmp/eicar /tmp/std_022_ftpVirusBlocked_file /tmp/temp_022_ftpVirusPassSite_file")
        result = remote_control.run_command("wget --user="******" --password='******' -q -O /tmp/eicar http://test.untangle.com/virus/eicar.com")
        assert (result == 0)
        result = remote_control.run_command("wget --user="******" --password='******' -q -O /tmp/std_022_ftpVirusBlocked_file ftp://" + global_functions.ftp_server + "/virus/fedexvirus.zip")
        assert (result == 0)
        md5StdNum = remote_control.run_command("\"md5sum /tmp/std_022_ftpVirusBlocked_file | awk '{print $1}'\"", stdout=True)
        self.md5StdNum = md5StdNum
        # print("md5StdNum <%s>" % md5StdNum)
        assert (result == 0)

        try:
            canRelay = global_functions.send_test_email(mailhost=testsiteIP)
        except Exception, e:
            canRelay = False

        if (uvmContext.appManager().isInstantiated(self.appName())):
            raise unittest2.SkipTest('app %s already instantiated' % self.appName())
        app = uvmContext.appManager().instantiate(self.appName(), default_policy_id)
        self.app = app

        if uvmContext.appManager().isInstantiated(self.appNameSSLInspector()):
            raise Exception('app %s already instantiated' % self.appNameSSLInspector())
        appSSL = uvmContext.appManager().instantiate(self.appNameSSLInspector(), default_policy_id)
        # appSSL.start()  # leave app off. app doesn't auto-start
        appSSLData = appSSL.getSettings()

        # Enable cloud connection
        system_settings = uvmContext.systemManager().getSettings()
        system_settings['cloudEnabled'] = True
        uvmContext.systemManager().setSettings(system_settings)
def setUpBuilder(self):
    raise unittest.SkipTest('Cannot run test unless addContainer is implemented')
def setUpModule():
    raise unittest2.SkipTest('foo')
def setUp(self):
    super(TestGtidBinLogStreamReader, self).setUp()
    if not self.supportsGTID:
        raise unittest.SkipTest("database does not support GTID, skipping GTID tests")