def test_logs_subcommand():
    get_logs_method = MagicMock()
    result_value = ""
    get_logs_method.return_value = (True, result_value)
    spark_controller.get_logs = get_logs_method

    command = "logs -s"
    name = "sessions_name"
    line = " ".join([command, name])
    cell = "cell code"

    # Could get results
    result = magic.spark(line, cell)

    get_logs_method.assert_called_once_with(name)
    assert result is None
    ipython_display.write.assert_called_once_with(result_value)

    # Could not get results
    get_logs_method.reset_mock()
    get_logs_method.return_value = (False, result_value)

    result = magic.spark(line, cell)

    get_logs_method.assert_called_once_with(name)
    assert result is None
    ipython_display.send_error.assert_called_once_with(result_value)
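The snippets in this collection all revolve around the same core idea: assigning to a mock's return_value makes the mock callable yield a canned value, after which the assert_called_* helpers verify how it was invoked. A minimal, self-contained sketch of that pattern (names here are purely illustrative; unittest.mock is the stdlib equivalent of the mock package imported elsewhere in these examples):

from unittest.mock import MagicMock

get_logs = MagicMock()
get_logs.return_value = (True, "log text")   # canned (success, payload) tuple

ok, logs = get_logs("session_name")          # the mock records this call

assert ok is True and logs == "log text"
get_logs.assert_called_once_with("session_name")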
Example #2
def test_util_tags_stats_tool_fitered_data(
    m_path,
    m_locales,
    m_proj,
    m_tag,
    trs_mock,
):
    # tests all filter functions are called when filtering data
    # and that they are called with the result of previous

    stats_tool = TagsStatsTool()
    m = m_tag, m_proj, m_locales, m_path

    # mock trs for translated_resources.all()
    _m = MagicMock()
    _m.all.return_value = 0
    trs_mock.return_value = _m

    for i, _m in enumerate(m):
        if i >= len(m) - 1:
            _m.return_value = 23
        else:
            _m.return_value = i

    # get the filtered_data
    result = stats_tool.filtered_data
    assert result == 23
    for i, _m in enumerate(m):
        assert _m.called
        if i > 0:
            assert _m.call_args[0][0] == i - 1
Example #3
    def mock_controller(self, plugin_label, method, return_value = None,
            side_effect = None, mock = None):
        """
        Mocks controller by label. Can only be used to test controllers
        that get instantiated automatically by cement.
        @param plugin_label: e.g. 'drupal'
        @param method: e.g. 'enumerate_plugins'
        @param return_value: what to return. Default is None, unless the
            method starts with enumerate_*, in which case the result is a
            tuple as expected by BasePlugin.
        @param mock: the MagicMock to place. If None, a blank MagicMock is
            created.
        @param side_effect: if set to an exception, it will raise an
            exception.
        """
        if mock:
            m = mock
        else:
            m = MagicMock()

        if return_value is not None:
            m.return_value = return_value
        else:
            if method.startswith("enumerate_"):
                m.return_value = ({"a":[]}, True)

        if side_effect:
            m.side_effect = side_effect

        setattr(self.controller_get(plugin_label), method, m)
        return m
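A possible usage sketch for the helper above, assuming a cement-based test case that exposes mock_controller and a 'drupal' controller with an 'enumerate_plugins' method (the labels come from the docstring; self.app.run() is an assumption, not part of the original):

# inside a test method of such a test case
m = self.mock_controller('drupal', 'enumerate_plugins')  # defaults to ({"a": []}, True)
self.app.run()                                           # assumed to exercise the controller
m.assert_called_once()

# forcing a failure path instead
self.mock_controller('drupal', 'enumerate_plugins',
                     side_effect=RuntimeError('network down'))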
Example #4
def test_with_per_method_throttling_calls_calls_the_right_api_for_each_method():
    """
    When apis[0] is throttled for METHOD_NAME_1 at now(),
    and apis[1] is throttled for METHOD_NAME_2 at now(),
     it should route METHOD_NAME_1 to apis[1],
     and METHOD_NAME_2 to apis[0].
    """

    METHOD_NAME_1 = 'followers_ids'
    METHOD_NAME_2 = 'user_timeline'

    api_mock = MagicMock()
    api_mock.return_value = api_mock
    ut_mock = Mock(return_value='called user_timeline on api_mock')
    api_mock.user_timeline = ut_mock

    api_mock2 = MagicMock()
    api_mock2.return_value = api_mock2
    fids_mock = Mock(return_value='called followers_ids on api_mock2')
    api_mock2.followers_ids = fids_mock

    with patch('tweepy.API', api_mock):
        api_pool = tweepy_pool.APIPool([OAUTH_DICT, OAUTH_DICT])
        api_pool._apis[0][0] = api_mock
        api_pool._apis[0][1][METHOD_NAME_1] = datetime.now()
        api_pool._apis[1][0] = api_mock2
        api_pool._apis[1][1][METHOD_NAME_2] = datetime.now()

    api_pool._call_with_throttling_per_method(METHOD_NAME_2, id=456)
    api_pool._call_with_throttling_per_method(METHOD_NAME_1, id=654)

    api_mock.user_timeline.assert_called_with(id=456)
    api_mock2.followers_ids.assert_called_with(id=654)
Example #5
def test_pick_api_with_shortest_wait_for_method_picks_different_api_for_each_method():
    """
    When apis[0] is throttled for METHOD_NAME_1 at now(),
    and apis[1] is throttled for METHOD_NAME_2 at now(),
     it should pick apis[1] for that METHOD_NAME_1
     and pick apis[0] for METHOD_NAME_2.
    """
    METHOD_NAME_1 = 'twitter_method_1'
    METHOD_NAME_2 = 'twitter_method_2'

    api_mock = MagicMock()
    api_mock.return_value = api_mock
    api_mock2 = MagicMock()
    api_mock2.return_value = api_mock2

    with patch('tweepy.API', api_mock):
        api_pool = tweepy_pool.APIPool([OAUTH_DICT, OAUTH_DICT])
        api_pool._apis[0][0] = api_mock
        api_pool._apis[0][1][METHOD_NAME_1] = datetime.now()
        api_pool._apis[1][0] = api_mock2
        api_pool._apis[1][1][METHOD_NAME_2] = datetime.now()


    picked_api_for_method_1 = api_pool._pick_api_with_shortest_waiting_time_for_method(METHOD_NAME_1)
    picked_api_for_method_2 = api_pool._pick_api_with_shortest_waiting_time_for_method(METHOD_NAME_2)

    assert api_mock2 == picked_api_for_method_1[0]
    assert api_mock == picked_api_for_method_2[0]
def test_appdynamics_log_query(monkeypatch, fx_log_hits):
    es_url, kwargs, res = fx_log_hits

    search = MagicMock()
    search.return_value = res

    monkeypatch.setattr('zmon_worker_monitor.builtins.plugins.appdynamics.ElasticsearchWrapper.search', search)

    timestamp = 1234
    timestamp_mock = MagicMock()
    timestamp_mock.return_value = timestamp
    monkeypatch.setattr(AppdynamicsWrapper, '_AppdynamicsWrapper__get_timestamp', timestamp_mock)

    cli = AppdynamicsWrapper(url=URL, es_url=es_url, username=USER, password=PASS, index_prefix='PREFIX_')

    exp_q = ('{} AND eventTimestamp:>{}'
             .format(kwargs.get('q', ''), timestamp)
             .lstrip(' AND '))

    result = cli.query_logs(**kwargs)

    expected = res['hits']['hits'] if not kwargs.get('raw_result', False) else res

    assert result == expected

    kwargs.pop('raw_result', None)
    kwargs['q'] = exp_q
    kwargs['size'] = 100
    kwargs['indices'] = ['PREFIX_*']
    if 'body' not in kwargs:
        kwargs['body'] = None

    search.assert_called_with(**kwargs)
Example #7
  def test_getScaledCPU( self ):
    tl = TimeLeft()
    res = tl.getScaledCPU()
    self.assertEqual( res, 0 )

    tl.scaleFactor = 5.0
    tl.normFactor = 5.0

    for batch, retValue in [( 'LSF', LSF_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )
      res = tl.getScaledCPU()
      self.assertEqual( res, 0.0 )

    for batch, retValue in [( 'SGE', SGE_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )
      res = tl.getScaledCPU()
      self.assertEqual( res, 300.0 )
def test_appdynamics_healthrule_violations_severity(monkeypatch, fx_violations, fx_severity):
    kwargs, violations = fx_violations

    resp = resp_mock(violations)
    get = requests_mock(resp)

    monkeypatch.setattr('requests.Session.get', get)

    start_time = 12345
    end_time = 23456

    mktime_mock = MagicMock()
    time_mock = MagicMock()
    mktime_mock.return_value = start_time
    time_mock.return_value = end_time
    monkeypatch.setattr('time.mktime', mktime_mock)
    monkeypatch.setattr('time.time', time_mock)

    cli = AppdynamicsWrapper(URL, username=USER, password=PASS)

    res = cli.healthrule_violations(APPLICATION, severity=fx_severity, **kwargs)

    assert [v for v in violations if v['severity'] == fx_severity] == res
    assert_client(cli)

    params = kwargs_to_params(kwargs, start_time, end_time)

    get.assert_called_with(cli.healthrule_violations_url(APPLICATION), params=params)
Example #9
    def test_getattr_with_correct_path(self):
        mocked_repo = MagicMock()
        mocked_first = MagicMock()
        mocked_last = MagicMock()

        mocked_first.return_value = "tomorrow"
        mocked_last.return_value = "tomorrow"
        mocked_repo.get_commit_dates.return_value = ['/']
        with patch('gitfs.views.history.lru_cache') as mocked_cache:
            mocked_cache.__call__ = lambda f: f

            history = HistoryView(repo=mocked_repo, uid=1, gid=1,
                                  mount_time="now")
            history._get_first_commit_time = mocked_first
            history._get_last_commit_time = mocked_last

            result = history.getattr("/", 1)
            asserted_result = {
                'st_uid': 1,
                'st_gid': 1,
                'st_ctime': "tomorrow",
                'st_mtime': "tomorrow",
                'st_nlink': 2,
                'st_mode': S_IFDIR | 0o555,
            }
            assert asserted_result == result
def test_appdynamics_log_count(monkeypatch, fx_log_count):
    es_url, kwargs, res = fx_log_count

    count = MagicMock()
    count.return_value = res

    monkeypatch.setattr('zmon_worker_monitor.builtins.plugins.appdynamics.ElasticsearchWrapper.count', count)

    timestamp = 1234
    timestamp_mock = MagicMock()
    timestamp_mock.return_value = timestamp
    monkeypatch.setattr(AppdynamicsWrapper, '_AppdynamicsWrapper__get_timestamp', timestamp_mock)

    cli = AppdynamicsWrapper(url=URL, es_url=es_url, username=USER, password=PASS, index_prefix='PREFIX_')

    exp_q = ('{} AND eventTimestamp:>{}'
             .format(kwargs.get('q', ''), timestamp)
             .lstrip(' AND '))

    result = cli.count_logs(**kwargs)

    assert result == res['count']

    kwargs['q'] = exp_q
    kwargs['indices'] = ['PREFIX_*']
    if 'body' not in kwargs:
        kwargs['body'] = None

    count.assert_called_with(**kwargs)
Example #11
  def test_getTimeLeft( self ):
#     for batch, retValue in [( 'LSF', LSF_ReturnValue ), ( 'SGE', SGE_ReturnValue )]:

    for batch, retValue in [( 'LSF', LSF_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      timeMock = MagicMock()

      tl = TimeLeft()
#      res = tl.getTimeLeft()
#      self.assertEqual( res['OK'], True )

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )

      tl.scaleFactor = 10.0
      tl.normFactor = 10.0
      tl.batchPlugin.bin = '/usr/bin'
      tl.batchPlugin.hostNorm = 10.0
      tl.batchPlugin.cpuLimit = 1000
      tl.batchPlugin.wallClockLimit = 1000

      with patch( "DIRAC.Core.Utilities.TimeLeft.LSFTimeLeft.runCommand", new=rcMock ):
        with patch( "DIRAC.Core.Utilities.TimeLeft.LSFTimeLeft.time", new=timeMock ):
          res = tl.getTimeLeft()
          self.assertEqual( res['OK'], True, res.get('Message', '') )

    for batch, retValue in [( 'SGE', SGE_ReturnValue )]:
      self.tl = importlib.import_module( "DIRAC.Core.Utilities.TimeLeft.TimeLeft" )
      rcMock = MagicMock()
      rcMock.return_value = S_OK( retValue )
      self.tl.runCommand = rcMock

      tl = TimeLeft()
#       res = tl.getTimeLeft()
#       self.assertFalse( res['OK'] )

      batchSystemName = '%sTimeLeft' % batch
      batchPlugin = __import__( 'DIRAC.Core.Utilities.TimeLeft.%s' %
                                batchSystemName, globals(), locals(), [batchSystemName] )
      batchStr = 'batchPlugin.%s()' % ( batchSystemName )
      tl.batchPlugin = eval( batchStr )

      tl.scaleFactor = 10.0
      tl.normFactor = 10.0
      tl.batchPlugin.bin = '/usr/bin'
      tl.batchPlugin.hostNorm = 10.0
      tl.batchPlugin.cpuLimit = 1000
      tl.batchPlugin.wallClockLimit = 1000

      res = tl.getTimeLeft()
      self.assert_( res['OK'] )
      self.assertEqual( res['Value'], 9400.0 )
    def test_check_connection(self):
        """
        Does it only ping the server if the option was set?
        """
        ping = MagicMock()
        dut = MagicMock()
        configuration_mock = MagicMock()
        dut.operating_system = 'linux'
        self.tester._dut = dut
        self.tester._server = MagicMock()
        self.tester._server.TestInterface = 'server'

        # check that it doesn't run
        self.parser.has_section.return_value = False
        #self.tester.configuration.ping = None
        outcome = self.tester.connected()
        with self.assertRaises(AssertionError):
            ping.assert_called_with()
        self.assertTrue(outcome)

        # check that it does run
        self.tester._ping = ping
        mock_time = MagicMock()
        times = [500,4,3, 2, 1]
        # pinged
        mock_time.side_effect = lambda: times.pop()
        with patch('time.time', mock_time):
            ping.return_value = True
            outcome = self.tester.connected()
            ping.assert_called_with()
            
        times = [700, 600, 500,400 ,300, 40,30, 20, 10]
        def time_effects():
            output =  times.pop()
            return output
        
        # timed out
        mock_time.side_effect = time_effects
        #mock_time_2 = MagicMock()
        self.tester._result_location = 'cache'
        with patch('time.time', mock_time):
            ping.return_value = False
            outcome = self.tester.connected()
            ping.assert_called_with()
            self.assertFalse(outcome)
        
        # timed out
        times = [700, 600, 100, 10]
        
        mock_time.side_effect = lambda: times.pop()
        # raises exception if asked to
        start = 0
        attenuation = 0
        with self.assertRaises(CameraobscuraError):
            with patch('time.time', mock_time):
                ping.return_value = False
                self.tester.connected(raise_error=start==attenuation)
        return
def getObject_Mock(hw_id):
    mock = MagicMock()

    return_values = get_raw_hardware_mocks()

    if hw_id in return_values:
        mock.return_value = return_values[hw_id]
    else:
        mock.return_value = {}

    return mock
def getObject_Mock(cci_id):
    mock = MagicMock()

    return_values = get_raw_cci_mocks()

    if cci_id in return_values:
        mock.return_value = return_values[cci_id]
    else:
        mock.return_value = {}

    return mock
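Both getObject_Mock factories follow the same recipe: pre-bake the mock's return_value from a lookup table so the fake getObject call answers the way the API fixture would. A hedged sketch of how such a factory might be exercised, assuming get_raw_hardware_mocks() has an entry for id 1234:

get_object = getObject_Mock(1234)

# whatever arguments the caller passes, the canned payload comes back
assert get_object(id=1234, mask='mask[id]') == get_raw_hardware_mocks()[1234]

# ids missing from the fixture were baked in as an empty dict (assuming 999999 is not present)
assert getObject_Mock(999999)() == {}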
Example #15
def test_aws_get_running_elbs(monkeypatch):
    get_classic = MagicMock()
    get_classic.return_value = [1, 2]

    get_application = MagicMock()
    get_application.return_value = [3, 4]

    monkeypatch.setattr('zmon_aws_agent.aws.get_running_elbs_classic', get_classic)
    monkeypatch.setattr('zmon_aws_agent.aws.get_running_elbs_application', get_application)

    res = aws.get_running_elbs('r1', 'acc1')

    assert res == [1, 2, 3, 4]
Example #16
    def test_mkdir(self):
        from gitfs.views import current as current_view
        old_mkdir = current_view.PassthroughView.mkdir
        old_chmod = current_view.PassthroughView.chmod
        mocked_mkdir = lambda self, path, mode: "done"

        mocked_chmod = MagicMock()
        mocked_chmod.return_value = None

        current_view.PassthroughView.mkdir = mocked_mkdir
        current_view.PassthroughView.chmod = mocked_chmod

        mocked_release = MagicMock()
        mocked_full_path = MagicMock()
        mocked_repo = MagicMock()

        mocked_full_path.return_value = "full_path"
        mocked_repo._full_path = mocked_full_path

        keep_path = '/path/.keep'
        mode = (os.O_WRONLY | os.O_CREAT)

        with patch('gitfs.views.current.os') as mocked_os:
            mocked_os.path.exists.return_value = False
            mocked_os.open.return_value = 10
            mocked_os.O_WRONLY = os.O_WRONLY
            mocked_os.O_CREAT = os.O_CREAT

            current = CurrentView(repo=mocked_repo, uid=1, gid=1,
                                  repo_path="repo_path",
                                  ignore=CachedIgnore())
            current.release = mocked_release

            assert current.mkdir("/path", "mode") == "done"
            mocked_full_path.assert_called_once_with(keep_path)
            mocked_os.path.exists.assert_called_once_with(keep_path)
            mocked_os.open.assert_called_once_with("full_path", mode)
            mocked_chmod.assert_called_once_with(
                keep_path,
                0o644,
            )
            assert current.dirty == {
                10: {
                    'message': "Create the /path directory",
                    'stage': True
                }
            }
            mocked_release.assert_called_once_with(keep_path, 10)

        current_view.PassthroughView.mkdir = old_mkdir
        current_view.PassthroughView.chmod = old_chmod
Example #17
def test_dns_query(monkeypatch):
    resolver = MagicMock()
    query = MagicMock()
    query.return_value = ['192.168.20.16']
    resolver.query = query
    resolver_class = MagicMock()
    resolver_class.return_value = resolver
    monkeypatch.setattr('dns.resolver.Resolver', resolver_class)

    dns = DnsWrapper(host=None)
    res = dns.query('google.de', 'A')

    assert res == ['192.168.20.16']
    query.assert_called_with('google.de', 'A')
Example #18
    def test_merging_strategy(self):
        mocked_repo = MagicMock()
        mocked_copy = MagicMock()
        mocked_remote_copy = MagicMock()
        mocked_reload = MagicMock()
        mocked_diverge = MagicMock()
        mocked_solve = MagicMock()
        mocked_ref = MagicMock()
        mocked_find_commits = MagicMock()

        mocked_ref.target = "target"
        mocked_diverge.first_commits = [Commit(1, "message", 1)]
        mocked_repo.index.conflicts = "conflicts"
        mocked_repo.lookup_reference.return_value = mocked_ref
        mocked_repo.commit.return_value = "new_commit"
        mocked_repo.find_diverge_commits = mocked_find_commits
        mocked_reload.return_value = "reload"
        mocked_find_commits.return_value = mocked_diverge
        mocked_copy.return_value = "local_copy"
        mocked_remote_copy.return_value = "remote_copy"

        mine = AcceptMine(mocked_repo, author="author", commiter="commiter")

        mine._create_local_copy = mocked_copy
        mine._create_remote_copy = mocked_remote_copy
        mine.reload_branch = mocked_reload
        mine.solve_conflicts = mocked_solve

        mine.__call__("local_branch", "remote_branch", "upstream")

        mocked_copy.assert_called_once_with("local_branch", "merging_local")
        mocked_remote_copy.assert_called_once_with("remote_branch", "upstream",
                                                   "merging_remote")
        mocked_find_commits.assert_called_once_with("local_copy",
                                                    "remote_copy")
        mocked_repo.checkout.has_calls([call("refs/heads/local_branch",
                                             strategy=GIT_CHECKOUT_FORCE)])
        mocked_repo.merge.assert_called_once_with(1)
        mocked_solve.assert_called_once_with(mocked_repo.index.conflicts)

        asserted_calls = [call("refs/heads/local_branch"),
                          call("refs/heads/merging_local")]
        mocked_repo.lookup_reference.has_calls(asserted_calls)
        mocked_repo.commit.assert_called_once_with("merging: message",
                                                   "author", "commiter",
                                                   mocked_ref, ["target", 1])
        mocked_repo.create_reference.called_once_with(mocked_ref, "new_commit",
                                                      force=True)
        assert mocked_repo.state_cleanup.call_count == 1
        assert mocked_ref.delete.call_count == 2
Example #19
    def test_git_obj_default_stats_with_valid_obj(self):
        mocked_repo = MagicMock()
        mocked_git_obj = MagicMock()
        mocked_size = MagicMock()

        mocked_git_obj.return_value = GIT_FILEMODE_BLOB
        mocked_size.return_value = 10

        repo = Repository(mocked_repo)
        repo.get_git_object_type = mocked_git_obj
        repo.get_blob_size = mocked_size

        assert repo.get_git_object_default_stats("ref", "/ups") == {"st_mode": S_IFREG | 0o444, "st_size": 10}
        mocked_size.assert_called_once_with("ref", "/ups")
        mocked_git_obj.assert_called_once_with("ref", "/ups")
Example #20
    def test_shape_function(self):

        array_func = MagicMock()
        array_func.return_value = None

        assert self.composite.shape is None

        self.composite.allocate('a')
        self.composite.set('a', array=array_func)

        assert self.composite.shape is None

        array_func.return_value = self.array1

        assert self.composite.shape == (2, 2)
def test_cassandra_execute(monkeypatch, kwargs):
    node = 'cassandra-node'
    keyspace = 'users'

    stmt = 'SELECT'
    result = [1, 2, 3]

    client = MagicMock()
    cluster = MagicMock()
    session = MagicMock()
    auth = MagicMock()

    cluster.return_value = client

    client.connect.return_value = session

    session.execute.return_value = result

    auth.return_value = 'auth'

    monkeypatch.setattr('zmon_worker_monitor.builtins.plugins.cassandra_wrapper.Cluster', cluster)
    monkeypatch.setattr('zmon_worker_monitor.builtins.plugins.cassandra_wrapper.PlainTextAuthProvider', auth)

    cassandra = CassandraWrapper(node, keyspace, **kwargs)

    res = cassandra.execute(stmt)

    assert res == result

    auth_provider = None
    port = 9042
    protocol_version = 3
    if kwargs:
        auth_provider = auth.return_value
        auth.assert_called_with(username=kwargs['username'], password=kwargs['password'])
        port = kwargs['port']
        protocol_version = kwargs['protocol_version']

    cluster.assert_called_with([node], connect_timeout=cassandra.connect_timeout, auth_provider=auth_provider,
                               port=port, protocol_version=protocol_version)

    cassandra = None

    client.connect.assert_called_once()
    client.shutdown.assert_called_once()

    session.set_keyspace.assert_called_with(keyspace)
    session.execute.assert_called_with(stmt)
Example #22
    def test_diverge(self):
        mocked_repo = MagicMock()
        mocked_lookup = MagicMock()
        mocked_find = MagicMock()
        mocked_commits = MagicMock()
        mocked_branch_remote = MagicMock(target=1)
        mocked_branch_local = MagicMock(target=2)

        def lookup(reference, opt):
            if "origin/master" == reference:
                return mocked_branch_remote
            return mocked_branch_local

        mocked_commits.second_commits = []
        mocked_commits.first_commits = []
        mocked_find.return_value = mocked_commits
        mocked_lookup = lookup

        repo = Repository(mocked_repo)
        repo.lookup_branch = mocked_lookup
        repo.find_diverge_commits = mocked_find

        assert repo.diverge("origin", "master") == (False, False)
        mocked_find.assert_called_once_with(mocked_branch_local,
                                            mocked_branch_remote)
def test_prepare_time_range_params(monkeypatch, fx_valid_time_range_params):
    kwargs, mock_time_vals, result = fx_valid_time_range_params

    cli = AppdynamicsWrapper(URL, USER, PASS)

    start_time = mock_time_vals['start-time']
    end_time = mock_time_vals['end-time']

    mktime_mock = MagicMock()
    time_mock = MagicMock()
    mktime_mock.return_value = start_time
    time_mock.return_value = end_time
    monkeypatch.setattr('time.mktime', mktime_mock)
    monkeypatch.setattr('time.time', time_mock)

    assert cli._prepare_time_range_params(**kwargs) == result
Example #24
 def test_disabled(self, mock_django_timezone: mock.MagicMock,
                   mock_queue_digest_recipient: mock.MagicMock) -> None:
     cutoff = timezone_now()
     # A Tuesday
     mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
     enqueue_emails(cutoff)
     mock_queue_digest_recipient.assert_not_called()
Example #25
def test_load_data_auto_assigns_label():
    factory = MagicMock()
    result = Data(x=[1, 2, 3], label='')
    factory.return_value = result
    d = df.load_data('test.fits', factory)
    factory.assert_called_once_with('test.fits')
    assert d.label == 'test'
Example #26
    def test_getattr(self):
        mocked_full = MagicMock()
        mocked_os = MagicMock()
        mocked_stat = MagicMock()
        mocked_repo = MagicMock()

        mocked_stat.simple = "stat"
        mocked_os.lstat.return_value = mocked_stat
        mocked_full.return_value = "full_path"
        mocked_repo._full_path = mocked_full

        with patch.multiple('gitfs.views.current', os=mocked_os,
                            STATS=['simple']):
            current = CurrentView(repo=mocked_repo, uid=1, gid=1,
                                  repo_path="repo_path",
                                  ignore=CachedIgnore())
            current._full_path = mocked_full

            result = current.getattr("path")
            asserted_result = {
                'st_uid': 1,
                'st_gid': 1,
                'simple': "stat"
            }
            assert result == asserted_result

            mocked_os.lstat.assert_called_once_with("full_path")
            mocked_full.assert_called_once_with("path")
Example #27
def test_normal_flow(monkeypatch):

    def _fake_prepare(args, dest):
        db_conn = sqlite3.connect(':memory:')
        db_conn.row_factory = sqlite3.Row
        db_conn.execute(
            'CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, '
            'type TEXT, path TEXT)'
        )
        return 'data', db_conn

    def _yielder():
        yield 'testmethod', 'testpath', 'cm'

    with tempfile.TemporaryDirectory() as td:
        monkeypatch.chdir(td)
        os.mkdir('foo')
        monkeypatch.setattr(sys, 'argv', ['doc2dash', 'foo', '-n', 'bar',
                                          '-a', '-i', 'qux.png'])
        monkeypatch.setattr(main, 'prepare_docset', _fake_prepare)
        dt = MagicMock(detect=lambda _: True)
        dt.name = 'testtype'
        dt.return_value = MagicMock(parse=_yielder)
        monkeypatch.setattr(doc2dash.parsers, 'get_doctype', lambda _: dt)
        with patch('doc2dash.__main__.log.info') as info, \
             patch('os.system') as system, \
             patch('shutil.copy2') as cp:
            main.main()
            # assert mock.call_args_list is None
            out = '\n'.join(call[0][0] for call in info.call_args_list) + '\n'
            assert system.call_args[0] == ('open -a dash "bar.docset"', )
            assert cp.call_args[0] == ('qux.png', 'bar.docset/icon.png')

    assert out == '''\
 def test_setup_ufw(self, mock_grant_access, mock_rsync):
     peer_addr_1 = '10.1.1.1'
     peer_addr_2 = '10.1.1.2'
     client_addrs = ['10.3.3.1', '10.3.3.2','10.3.3.3', 'ubuntu.com']
     ports = [6660, 6661, 6662]
     self.test_config.set('object-server-port', ports[0])
     self.test_config.set('container-server-port', ports[1])
     self.test_config.set('account-server-port', ports[2])
     RelatedUnits = namedtuple('RelatedUnits', 'rid, unit')
     self.iter_units_for_relation_name.return_value = [
             RelatedUnits(rid='rid:1', unit='unit/1'),
             RelatedUnits(rid='rid:1', unit='unit/2'),
             RelatedUnits(rid='rid:1', unit='unit/3'),
             RelatedUnits(rid='rid:1', unit='unit/4')]
     self.ingress_address.side_effect = client_addrs
     context_call = MagicMock()
     context_call.return_value = {'allowed_hosts': '{} {}'
                                  ''.format(peer_addr_1, peer_addr_2)}
     mock_rsync.return_value = context_call
     swift_utils.setup_ufw()
     calls = []
     for addr in [peer_addr_1, peer_addr_2] + client_addrs:
         for port in ports:
             if addr == 'ubuntu.com':
                 calls.append(call('91.189.94.40', port))
             else:
                 calls.append(call(addr, port))
     mock_grant_access.assert_has_calls(calls)
    def test_provider_redirects_on_success_preauth_payment(
            self, mocked_redirect, mocked_post):
        data = MagicMock()
        data.return_value = {
            'token_type': 'test_token_type',
            'access_token': 'test_access_token',
            'payer': {'payer_info': 'test123'},
            'transactions': [
                {'related_resources': [{
                    'sale': {'links': ''},
                    'authorization': {'links': ''}}]}
            ]}
        post = MagicMock()
        post.json = data
        post.status_code = 200
        mocked_post.return_value = post

        request = MagicMock()
        request.GET = {'token': 'test', 'PayerID': '1234'}
        provider = PaypalProvider(
            secret=SECRET, client_id=CLIENT_ID, capture=False)
        provider.process_data(self.payment, request)

        self.assertEqual(self.payment.status, 'preauth')
        self.assertEqual(self.payment.captured_amount, Decimal('0'))
Example #30
    def test_list(self):
        # Patch S3Connection and its get_bucket method:
        with patch('gifshare.s3.S3Connection',
                   name='S3Connection') as MockS3Connection:
            mock_get_bucket = MagicMock(name='get_bucket')
            mock_bucket = MagicMock(name='bucket')
            mock_get_bucket.return_value = mock_bucket
            mock_bucket.list.return_value = [
                DummyKey('image1.jpeg'),
                DummyKey('image2.jpeg')
            ]
            MockS3Connection.return_value.get_bucket = mock_get_bucket

            self.bucket = gifshare.s3.Bucket(config_stub)
            keys = list(self.bucket.list())

            self.assertEqual(keys, [
                'http://dummy.web.root/image1.jpeg',
                'http://dummy.web.root/image2.jpeg',
            ])

            MockS3Connection.assert_called_with(
                'dummy-access-id', 'dummy-secret-access-key')
            mock_get_bucket.assert_called_with('not.a.bucket')
            mock_bucket.list.assert_called_once_with()
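The S3 test above leans on attribute chaining: assigning to MockS3Connection.return_value configures the instance object that the patched constructor will hand back. The same chaining can be written in one line; a minimal sketch with generic names (not gifshare's actual API):

from unittest.mock import MagicMock

conn_cls = MagicMock(name='ConnCls')
conn_cls.return_value.get_bucket.return_value.list.return_value = ['k1', 'k2']

conn = conn_cls('access-id', 'secret')   # the "constructor" returns the configured instance
assert list(conn.get_bucket('my-bucket').list()) == ['k1', 'k2']
conn_cls.assert_called_with('access-id', 'secret')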
Example #31
    def test_extraction_with_single_result(self):
        # type: () -> None
        with patch.object(SQLAlchemyExtractor,
                          "_get_connection") as mock_connection:
            connection = MagicMock()
            mock_connection.return_value = connection
            sql_execute = MagicMock()
            connection.execute = sql_execute
            table = {
                "schema": "test_schema",
                "name": "test_table",
                "description": "a table for testing",
                "cluster": self.conf[SnowflakeMetadataExtractor.CLUSTER_KEY],
                "is_view": "false",
            }

            sql_execute.return_value = [
                self._union(
                    {
                        "col_name": "col_id1",
                        "col_type": "number",
                        "col_description": "description of id1",
                        "col_sort_order": 0,
                    },
                    table,
                ),
                self._union(
                    {
                        "col_name": "col_id2",
                        "col_type": "number",
                        "col_description": "description of id2",
                        "col_sort_order": 1,
                    },
                    table,
                ),
                self._union(
                    {
                        "col_name": "is_active",
                        "col_type": "boolean",
                        "col_description": None,
                        "col_sort_order": 2,
                    },
                    table,
                ),
                self._union(
                    {
                        "col_name": "source",
                        "col_type": "varchar",
                        "col_description": "description of source",
                        "col_sort_order": 3,
                    },
                    table,
                ),
                self._union(
                    {
                        "col_name": "etl_created_at",
                        "col_type": "timestamp_ltz",
                        "col_description": "description of etl_created_at",
                        "col_sort_order": 4,
                    },
                    table,
                ),
                self._union(
                    {
                        "col_name": "ds",
                        "col_type": "varchar",
                        "col_description": None,
                        "col_sort_order": 5,
                    },
                    table,
                ),
            ]

            extractor = SnowflakeMetadataExtractor()
            extractor.init(self.conf)
            actual = extractor.extract()
            expected = TableMetadata(
                "prod",
                "MY_CLUSTER",
                "test_schema",
                "test_table",
                "a table for testing",
                [
                    ColumnMetadata("col_id1", "description of id1", "number",
                                   0),
                    ColumnMetadata("col_id2", "description of id2", "number",
                                   1),
                    ColumnMetadata("is_active", None, "boolean", 2),
                    ColumnMetadata("source", "description of source",
                                   "varchar", 3),
                    ColumnMetadata(
                        "etl_created_at",
                        "description of etl_created_at",
                        "timestamp_ltz",
                        4,
                    ),
                    ColumnMetadata("ds", None, "varchar", 5),
                ],
            )

            self.assertEqual(expected.__repr__(), actual.__repr__())
            self.assertIsNone(extractor.extract())
Example #32
    def test_track_active_mobile_push_notifications(
            self, mock_push_notifications: mock.MagicMock) -> None:
        mock_push_notifications.return_value = True
        self.login(self.example_email("hamlet"))
        user_profile = self.example_user('hamlet')
        stream = self.subscribe(user_profile, "test_stream")
        second_stream = self.subscribe(user_profile, "second_stream")

        property_name = "push_notifications"
        result = self.api_post(
            user_profile.email, "/api/v1/users/me/subscriptions/properties", {
                "subscription_data":
                ujson.dumps([{
                    "property": property_name,
                    "value": True,
                    "stream_id": stream.id
                }])
            })
        result = self.api_post(
            user_profile.email, "/api/v1/users/me/subscriptions/properties", {
                "subscription_data":
                ujson.dumps([{
                    "property": property_name,
                    "value": True,
                    "stream_id": second_stream.id
                }])
            })
        self.assert_json_success(result)
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [])

        message_id = self.send_stream_message(self.example_email("cordelia"),
                                              "test_stream", "hello",
                                              "test_topic")
        second_message_id = self.send_stream_message(
            self.example_email("cordelia"), "test_stream", "hello",
            "other_topic")
        third_message_id = self.send_stream_message(
            self.example_email("cordelia"), "second_stream", "hello",
            "test_topic")

        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [message_id, second_message_id, third_message_id])

        result = self.client_post("/json/mark_topic_as_read", {
            "stream_id": str(stream.id),
            "topic_name": "test_topic",
        })

        self.assert_json_success(result)
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [second_message_id, third_message_id])

        result = self.client_post("/json/mark_stream_as_read", {
            "stream_id": str(stream.id),
            "topic_name": "test_topic",
        })
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [third_message_id])

        fourth_message_id = self.send_stream_message(
            self.example_email("cordelia"), "test_stream", "hello",
            "test_topic")
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [third_message_id, fourth_message_id])

        result = self.client_post("/json/mark_all_as_read", {})
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [])
        mock_push_notifications.assert_called()
Example #33
import unittest
import importlib
import os
import shutil

from mock import MagicMock, patch

from DIRAC import gLogger

from DIRAC.DataManagementSystem.Client.test.mock_DM import dm_mock
from DIRAC.Resources.Catalog.test.mock_FC import fc_mock

from DIRAC.WorkloadManagementSystem.JobWrapper.JobWrapper import JobWrapper
from DIRAC.WorkloadManagementSystem.JobWrapper.WatchdogLinux import WatchdogLinux

getSystemSectionMock = MagicMock()
getSystemSectionMock.return_value = 'aValue'


class JobWrapperTestCase(unittest.TestCase):
    """ Base class for the JobWrapper test cases """
    def setUp(self):
        gLogger.setLevel('DEBUG')

    def tearDown(self):
        for f in ['std.out']:
            try:
                os.remove(f)
            except OSError:
                pass
Example #34
def patch_safehaven(monkeypatch):
    """Patch SafeHaven."""
    mock_haven = MagicMock()
    mock_haven.return_value = mock_haven
    monkeypatch.setattr("runway.cfngin.cfngin.SafeHaven", mock_haven)
    return mock_haven
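patch_safehaven relies on the self-returning trick that shows up in several examples above (mock.return_value = mock): calling the patched class hands back the very same MagicMock, so a test can assert on the "instance" without reaching through return_value. A small sketch of that idea, using a hypothetical Client class rather than runway's SafeHaven:

from unittest.mock import MagicMock

client_cls = MagicMock(name='Client')
client_cls.return_value = client_cls        # Client(...) returns the same mock

client = client_cls(region='eu-central-1')  # "instantiation"
client.fetch('thing')

client_cls.assert_called_with(region='eu-central-1')  # the constructor call
client_cls.fetch.assert_called_with('thing')          # the method call on the "instance"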
    def test_extraction_with_single_result(self):
        # type: () -> None
        with patch.object(SQLAlchemyExtractor, '_get_connection') as mock_connection:
            connection = MagicMock()
            mock_connection.return_value = connection
            sql_execute = MagicMock()
            connection.execute = sql_execute
            table = {'schema_name': 'test_schema',
                     'name': 'test_table',
                     'description': '',
                     'cluster': self.conf['extractor.athena_metadata.{}'.format(AthenaMetadataExtractor.CATALOG_KEY)],
                     }

            sql_execute.return_value = [
                self._union(
                    {'col_name': 'col_id1',
                     'col_type': 'bigint',
                     'col_description': 'description of id1',
                     'col_sort_order': 0,
                     'extras': None}, table),
                self._union(
                    {'col_name': 'col_id2',
                     'col_type': 'bigint',
                     'col_description': 'description of id2',
                     'col_sort_order': 1,
                     'extras': None}, table),
                self._union(
                    {'col_name': 'is_active',
                     'col_type': 'boolean',
                     'col_description': None,
                     'col_sort_order': 2,
                     'extras': None}, table),
                self._union(
                    {'col_name': 'source',
                     'col_type': 'varchar',
                     'col_description': 'description of source',
                     'col_sort_order': 3,
                     'extras': None}, table),
                self._union(
                    {'col_name': 'etl_created_at',
                     'col_type': 'timestamp',
                     'col_description': None,
                     'col_sort_order': 4,
                     'extras': 'partition key'}, table),
                self._union(
                    {'col_name': 'ds',
                     'col_type': 'varchar',
                     'col_description': None,
                     'col_sort_order': 5,
                     'extras': None}, table)
            ]

            extractor = AthenaMetadataExtractor()
            extractor.init(self.conf)
            actual = extractor.extract()
            expected = TableMetadata('athena', self.conf['extractor.athena_metadata.{}'.
                                     format(AthenaMetadataExtractor.CATALOG_KEY)], 'test_schema', 'test_table', '',
                                     [ColumnMetadata('col_id1', 'description of id1', 'bigint', 0),
                                      ColumnMetadata('col_id2', 'description of id2', 'bigint', 1),
                                      ColumnMetadata('is_active', None, 'boolean', 2),
                                      ColumnMetadata('source', 'description of source', 'varchar', 3),
                                      ColumnMetadata('etl_created_at', 'partition key', 'timestamp', 4),
                                      ColumnMetadata('ds', None, 'varchar', 5)])
            self.assertEqual(expected.__repr__(), actual.__repr__())
            self.assertIsNone(extractor.extract())
Example #36
def test_should_return_true_on_compare_success(mock_checkpw: MagicMock,
                                               sut: BcryptAdapter):
    mock_checkpw.return_value = True
    result = sut.compare("any_value", "any_hash")
    assert result
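mock_checkpw arrives here as a pre-built fixture; one plausible wiring for it, assuming the BcryptAdapter calls bcrypt.checkpw internally (the fixture itself is not shown in the original, so this is only a hedged sketch):

import pytest
from unittest.mock import patch

@pytest.fixture
def mock_checkpw():
    # replace bcrypt.checkpw for the duration of each test
    with patch('bcrypt.checkpw') as checkpw:
        yield checkpw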
Example #37
    def test_prepare_components(self):
        mocked_argparse = MagicMock()
        mocked_parser = MagicMock()
        mocked_args = MagicMock()
        mocked_queue = MagicMock()
        mocked_router = MagicMock()
        mocked_routes = MagicMock()
        mocked_merger = MagicMock()
        mocked_fetcher = MagicMock()
        mocked_fuse = MagicMock()
        mocked_merge_worker = MagicMock()
        mocked_fetch_worker = MagicMock()

        args = EmptyObject(
            **{
                'remote_url': 'remote_url',
                'mount_point': 'mount_point',
                'username': '******',
                'password': '',
                'ssh_key': '/home/user/.ssh/id_rsa',
                'ssh_user': '******',
                'foreground': True,
                'allow_root': True,
                'allow_others': True,
                'repo_path': 'repo_path',
                'branch': 'branch',
                'user': '******',
                'group': 'group',
                'max_size': 'max_size',
                'max_offset': 'max_offset',
                'upstream': 'origin',
                'fetch_timeout': 10,
                'merge_timeout': 10,
                'commiter_name': 'commit',
                'commiter_email': '*****@*****.**',
                'log': 'syslog',
                'ignore_file': '',
                'module_file': '',
                'hard_ignore': None,
                'min_idle_times': 1,
                'idle_fetch_timeout': 10,
            })

        mocked_argparse.Argumentparser.return_value = mocked_parser
        mocked_args.return_value = args
        mocked_merger.return_value = mocked_merge_worker
        mocked_fetcher.return_value = mocked_fetch_worker
        mocked_router.repo = 'repo'
        mocked_router.repo_path = 'repo_path'

        with patch.multiple('gitfs.mounter',
                            CommitQueue=MagicMock(return_value=mocked_queue),
                            Router=MagicMock(return_value=mocked_router),
                            routes=mocked_routes,
                            SyncWorker=mocked_merger,
                            FetchWorker=mocked_fetcher,
                            FUSE=mocked_fuse,
                            get_credentials=MagicMock(return_value='cred')):

            assert_result = (mocked_merge_worker, mocked_fetch_worker,
                             mocked_router)

            assert prepare_components(args) == assert_result
            mocked_fetcher.assert_called_once_with(upstream='origin',
                                                   branch='branch',
                                                   repository='repo',
                                                   timeout=10,
                                                   idle_timeout=10,
                                                   credentials='cred')

            asserted_call = {
                'repository': 'repo',
                'upstream': 'origin',
                'branch': 'branch',
                'timeout': 10,
                'repo_path': 'repo_path',
                'commit_queue': mocked_queue,
                'credentials': 'cred',
                'min_idle_times': 1,
            }
            mocked_merger.assert_called_once_with('commit',
                                                  '*****@*****.**',
                                                  'commit',
                                                  '*****@*****.**',
                                                  **asserted_call)
Example #38
 def test_device_bind_kvarg(self):
     self.dev.bind()
     mock = MagicMock()
     mock.return_value = 'Test'
     self.dev.bind(kw=mock)
     self.assertEqual(self.dev.kw, 'Test')
Example #39
from __future__ import division
from __future__ import print_function

# imports
from mock import MagicMock

# DIRAC Components
from DIRAC.ResourceStatusSystem.Agent.SiteInspectorAgent import SiteInspectorAgent
from DIRAC import gLogger

gLogger.setLevel("DEBUG")

# Mock Objects
mockAM = MagicMock()
mockNone = MagicMock()
mockNone.return_value = None
mockSM = MagicMock()

site = {
    "status": "status",
    "name": "site",
    "vO": "some_vo",
    "site": "site",
    "element": "Site",
    "statusType": "all",
    "elementType": "Site",
}


def test__execute(mocker):
    """Testing SiteInspectorAgent.execute()"""
Example #40
 def setUp(self, mock_get_proxy_client: MagicMock) -> None:
     self.mock_client = mock.Mock()
     mock_get_proxy_client.return_value = self.mock_client
     self.api = UserFollowsAPI()
    def test_request(self, mock_function: MagicMock) -> None:
        """A normal request is handled properly"""
        mock_function.return_value = None
        record = self.simulate_error()

        report = self.run_handler(record)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertIn("stack_trace", report)

        # Test that `add_request_metadata` throwing an exception is fine
        with patch("zerver.logging_handlers.traceback.print_exc"):
            with patch("zerver.logging_handlers.add_request_metadata",
                       side_effect=Exception("Unexpected exception!")):
                report = self.run_handler(record)
        self.assertNotIn("user_email", report)
        self.assertIn("message", report)
        self.assertEqual(report["stack_trace"],
                         "See /var/log/zulip/errors.log")

        # Check anonymous user is handled correctly
        record.request.user = AnonymousUser(
        )  # type: ignore # this field is dynamically added
        report = self.run_handler(record)
        self.assertIn("host", report)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertIn("stack_trace", report)

        # Now simulate a DisallowedHost exception
        def get_host_error() -> None:
            raise Exception("Get Host Failure!")

        orig_get_host = record.request.get_host  # type: ignore # this field is dynamically added
        record.request.get_host = get_host_error  # type: ignore # this field is dynamically added
        report = self.run_handler(record)
        record.request.get_host = orig_get_host  # type: ignore # this field is dynamically added
        self.assertIn("host", report)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertIn("stack_trace", report)

        # Test an exception_filter exception
        with patch("zerver.logging_handlers.get_exception_reporter_filter",
                   return_value=15):
            record.request.method = "POST"  # type: ignore # this field is dynamically added
            report = self.run_handler(record)
            record.request.method = "GET"  # type: ignore # this field is dynamically added
        self.assertIn("host", report)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertIn("stack_trace", report)

        # Test the catch-all exception handler doesn't throw
        with patch('zerver.lib.error_notify.notify_server_error',
                   side_effect=Exception("queue error")):
            self.handler.emit(record)
        with self.settings(STAGING_ERROR_NOTIFICATIONS=False):
            with patch('zerver.logging_handlers.queue_json_publish',
                       side_effect=Exception("queue error")):
                self.handler.emit(record)

        # Test no exc_info
        record.exc_info = None
        report = self.run_handler(record)
        self.assertIn("host", report)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertEqual(report["stack_trace"], 'No stack trace available')

        # Test arbitrary exceptions from request.user
        record.request.user = None  # type: ignore # this field is dynamically added
        with patch("zerver.logging_handlers.traceback.print_exc"):
            report = self.run_handler(record)
        self.assertIn("host", report)
        self.assertIn("user_email", report)
        self.assertIn("message", report)
        self.assertIn("stack_trace", report)
Example #42
from __future__ import absolute_import
from datetime import datetime, tzinfo, timedelta
from unittest import TestCase
from mock import patch, MagicMock

import sqlalchemy as sa
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base

from crate.client.cursor import Cursor


fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor


class CST(tzinfo):
    """
    Timezone object for CST
    """

    def utcoffset(self, date_time):
        return timedelta(seconds=-3600)

    def dst(self, date_time):
        return timedelta(seconds=-7200)


@patch('crate.client.connection.Cursor', FakeCursor)

# pylint: disable=protected-access

# imports
import pytest
from mock import MagicMock

from DIRAC import gLogger

# sut
from DIRAC.WorkloadManagementSystem.Agent.SiteDirector import SiteDirector

mockAM = MagicMock()
mockGCReply = MagicMock()
mockGCReply.return_value = 'TestSetup'
mockOPSObject = MagicMock()
mockOPSObject.getValue.return_value = '123'
mockOPSReply = MagicMock()
mockOPSReply.return_value = '123'

mockOPS = MagicMock()
mockOPS.return_value = mockOPSObject
# mockOPS.Operations = mockOPSObject
mockPM = MagicMock()
mockPM.requestToken.return_value = {'OK': True, 'Value': ('token', 1)}
mockPMReply = MagicMock()
mockPMReply.return_value = {'OK': True, 'Value': ('token', 1)}

mockCSGlobalReply = MagicMock()
mockCSGlobalReply.return_value = 'TestSetup'
Example #44
    def test_getTimeLeft(self):
        #     for batch, retValue in [( 'LSF', LSF_ReturnValue ), ( 'SGE', SGE_ReturnValue )]:

        for batch, retValue in [('LSF', LSF_ReturnValue)]:
            self.tl = importlib.import_module(
                "DIRAC.Core.Utilities.TimeLeft.TimeLeft")
            rcMock = MagicMock()
            rcMock.return_value = S_OK(retValue)
            self.tl.runCommand = rcMock

            timeMock = MagicMock()

            tl = TimeLeft()
            #      res = tl.getTimeLeft()
            #      self.assertEqual( res['OK'], True )

            batchSystemName = '%sTimeLeft' % batch
            batchPlugin = __import__(
                'DIRAC.Core.Utilities.TimeLeft.%s' % batchSystemName,
                globals(), locals(), [batchSystemName])
            batchStr = 'batchPlugin.%s()' % (batchSystemName)
            tl.batchPlugin = eval(batchStr)

            tl.scaleFactor = 10.0
            tl.normFactor = 10.0
            tl.batchPlugin.bin = '/usr/bin'
            tl.batchPlugin.hostNorm = 10.0
            tl.batchPlugin.cpuLimit = 1000
            tl.batchPlugin.wallClockLimit = 1000

            with patch("DIRAC.Core.Utilities.TimeLeft.LSFTimeLeft.runCommand",
                       new=rcMock):
                with patch("DIRAC.Core.Utilities.TimeLeft.LSFTimeLeft.time",
                           new=timeMock):
                    res = tl.getTimeLeft()
                    self.assertEqual(res['OK'], True, res.get('Message', ''))

        for batch, retValue in [('SGE', SGE_ReturnValue)]:
            self.tl = importlib.import_module(
                "DIRAC.Core.Utilities.TimeLeft.TimeLeft")
            rcMock = MagicMock()
            rcMock.return_value = S_OK(retValue)
            self.tl.runCommand = rcMock

            tl = TimeLeft()
            #       res = tl.getTimeLeft()
            #       self.assertFalse( res['OK'] )

            batchSystemName = '%sTimeLeft' % batch
            batchPlugin = __import__(
                'DIRAC.Core.Utilities.TimeLeft.%s' % batchSystemName,
                globals(), locals(), [batchSystemName])
            batchStr = 'batchPlugin.%s()' % (batchSystemName)
            tl.batchPlugin = eval(batchStr)

            tl.scaleFactor = 10.0
            tl.normFactor = 10.0
            tl.batchPlugin.bin = '/usr/bin'
            tl.batchPlugin.hostNorm = 10.0
            tl.batchPlugin.cpuLimit = 1000
            tl.batchPlugin.wallClockLimit = 1000

            res = tl.getTimeLeft()
            self.assertTrue(res['OK'])
            self.assertEqual(res['Value'], 9400.0)
Example #45
    def test_getScaledCPU(self):
        tl = TimeLeft()
        res = tl.getScaledCPU()
        self.assertEqual(res, 0)

        tl.scaleFactor = 5.0
        tl.normFactor = 5.0

        for batch, retValue in [('LSF', LSF_ReturnValue)]:
            self.tl = importlib.import_module(
                "DIRAC.Core.Utilities.TimeLeft.TimeLeft")
            rcMock = MagicMock()
            rcMock.return_value = S_OK(retValue)
            self.tl.runCommand = rcMock

            batchSystemName = '%sTimeLeft' % batch
            batchPlugin = __import__(
                'DIRAC.Core.Utilities.TimeLeft.%s' %  #pylint: disable=unused-variable
                batchSystemName,
                globals(),
                locals(),
                [batchSystemName])
            batchStr = 'batchPlugin.%s()' % (batchSystemName)
            tl.batchPlugin = eval(batchStr)
            res = tl.getScaledCPU()
            self.assertEqual(res, 0.0)

        for batch, retValue in [('SGE', SGE_ReturnValue)]:
            self.tl = importlib.import_module(
                "DIRAC.Core.Utilities.TimeLeft.TimeLeft")
            rcMock = MagicMock()
            rcMock.return_value = S_OK(retValue)
            self.tl.runCommand = rcMock

            batchSystemName = '%sTimeLeft' % batch
            batchPlugin = __import__(
                'DIRAC.Core.Utilities.TimeLeft.%s' %  #pylint: disable=unused-variable
                batchSystemName,
                globals(),
                locals(),
                [batchSystemName])
            batchStr = 'batchPlugin.%s()' % (batchSystemName)
            tl.batchPlugin = eval(batchStr)
            res = tl.getScaledCPU()
            self.assertEqual(res, 300.0)

        for batch, retValue in [('MJF', MJF_ReturnValue)]:
            self.tl = importlib.import_module(
                "DIRAC.Core.Utilities.TimeLeft.TimeLeft")
            rcMock = MagicMock()
            rcMock.return_value = S_OK(retValue)
            self.tl.runCommand = rcMock

            batchSystemName = '%sTimeLeft' % batch
            batchPlugin = __import__(
                'DIRAC.Core.Utilities.TimeLeft.%s' %  #pylint: disable=unused-variable
                batchSystemName,
                globals(),
                locals(),
                [batchSystemName])
            batchStr = 'batchPlugin.%s()' % (batchSystemName)
            tl.batchPlugin = eval(batchStr)
            res = tl.getScaledCPU()
            self.assertEqual(res, 0.0)
Example #46
0
def test_http_prometheus_flat(monkeypatch):
    resp = MagicMock()
    resp.text = '''
# HELP http_server_requests_seconds
# TYPE http_server_requests_seconds summary
http_server_requests_seconds{exception="None",method="GET",status="200",uri="/api/hello",quantile="0.95",} 0.003080192
http_server_requests_seconds{exception="None",method="GET",status="200",uri="/api/hello",quantile="0.99",} 0.071237632
http_server_requests_seconds_count{exception="None",method="GET",status="200",uri="/api/hello",} 20.0
http_server_requests_seconds_sum{exception="None",method="GET",status="200",uri="/api/hello",} 0.103182669
# HELP http_server_requests_seconds_max
# TYPE http_server_requests_seconds_max gauge
http_server_requests_seconds_max{exception="None",method="GET",status="200",uri="/api/hello",} 0.067652582
# HELP jvm_memory_committed_bytes The amount of memory in bytes that is committed for  the Java virtual machine to use
# TYPE jvm_memory_committed_bytes gauge
jvm_memory_committed_bytes{area="nonheap",id="Code Cache",} 1.9070976E7
jvm_memory_committed_bytes{area="nonheap",id="Metaspace",} 5.5574528E7
jvm_memory_committed_bytes{area="nonheap",id="Compressed Class Space",} 7340032.0
jvm_memory_committed_bytes{area="heap",id="PS Eden Space",} 2.84688384E8
jvm_memory_committed_bytes{area="heap",id="PS Survivor Space",} 1.6252928E7
jvm_memory_committed_bytes{area="heap",id="PS Old Gen",} 2.3855104E8
# HELP httpsessions_max httpsessions_max
# TYPE httpsessions_max gauge
httpsessions_max -1.0
# HELP httpsessions_active httpsessions_active
# TYPE httpsessions_active gauge
httpsessions_active 0.0
# HELP mem mem
# TYPE mem gauge
mem 370583.0
# HELP mem_free mem_free
# TYPE mem_free gauge
mem_free 176263.0
# HELP processors processors
# TYPE processors gauge
processors 8.0

'''
    get = MagicMock()
    get.return_value = resp
    monkeypatch.setattr('requests.get', get)
    http = HttpWrapper('http://example.org/prometheus/')
    expected = {
        u'jvm_memory_committed_bytes': {
            u'area.nonheap.id.Code Cache': 1.9070976E7,
            u'area.nonheap.id.Metaspace': 5.5574528E7,
            u'area.nonheap.id.Compressed Class Space': 7340032.0,
            u'area.heap.id.PS Eden Space': 2.84688384E8,
            u'area.heap.id.PS Survivor Space': 1.6252928E7,
            u'area.heap.id.PS Old Gen': 2.3855104E8
        },
        u'httpsessions_max': -1.0,
        u'httpsessions_active': 0.0,
        u'mem': 370583.0,
        u'mem_free': 176263.0,
        u'processors': 8.0,
        u'http_server_requests_seconds': {
            u'exception.None.method.GET.quantile.0.95.status.200.uri./api/hello': 0.003080192,
            u'exception.None.method.GET.quantile.0.99.status.200.uri./api/hello': 0.071237632,
        },
        u'http_server_requests_seconds_count': {
            u'exception.None.method.GET.status.200.uri./api/hello': 20.0
        },
        u'http_server_requests_seconds_max': {
            u'exception.None.method.GET.status.200.uri./api/hello': 0.067652582
        },
        u'http_server_requests_seconds_sum': {
            u'exception.None.method.GET.status.200.uri./api/hello': 0.103182669
        }
    }
    assert expected == http.prometheus_flat()
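Example #47
0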
def test_should_403_if_load_survey_by_id_returns_none(
        mock_load_by_id: MagicMock, sut: LoadSurveyResultController):
    mock_load_by_id.return_value = None
    http_response = sut.handle(HttpRequest(params=dict(survey_id="any_id")))
    assert http_response.status_code == 403
    assert http_response.body["message"] == "Invalid param: survey_id"
Example #48
0
def test_should_return_hash_on_hash_success(mock_hashpw: MagicMock,
                                            sut: BcryptAdapter):
    value = "hash"
    mock_hashpw.return_value = value.encode("utf-8")
    hash = sut.hash("any_value")
    assert hash == "hash"
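Example #49
0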
    def test_extraction_with_multiple_result(self):
        # type: () -> None
        with patch.object(SQLAlchemyExtractor, '_get_connection') as mock_connection:
            connection = MagicMock()
            mock_connection.return_value = connection
            sql_execute = MagicMock()
            connection.execute = sql_execute
            table = {'schema_name': 'test_schema1',
                     'name': 'test_table1',
                     'description': 'test table 1',
                     'cluster':
                         self.conf['extractor.mssql_metadata.{}'.format(MSSQLMetadataExtractor.CLUSTER_KEY)]
                     }

            table1 = {'schema_name': 'test_schema1',
                      'name': 'test_table2',
                      'description': 'test table 2',
                      'cluster':
                          self.conf['extractor.mssql_metadata.{}'.format(MSSQLMetadataExtractor.CLUSTER_KEY)]
                      }

            table2 = {'schema_name': 'test_schema2',
                      'name': 'test_table3',
                      'description': 'test table 3',
                      'cluster':
                          self.conf['extractor.mssql_metadata.{}'.format(MSSQLMetadataExtractor.CLUSTER_KEY)]
                      }

            sql_execute.return_value = [
                self._union(
                    {'col_name': 'col_id1',
                     'col_type': 'bigint',
                     'col_description': 'description of col_id1',
                     'col_sort_order': 0}, table),
                self._union(
                    {'col_name': 'col_id2',
                     'col_type': 'bigint',
                     'col_description': 'description of col_id2',
                     'col_sort_order': 1}, table),
                self._union(
                    {'col_name': 'is_active',
                     'col_type': 'boolean',
                     'col_description': None,
                     'col_sort_order': 2}, table),
                self._union(
                    {'col_name': 'source',
                     'col_type': 'varchar',
                     'col_description': 'description of source',
                     'col_sort_order': 3}, table),
                self._union(
                    {'col_name': 'etl_created_at',
                     'col_type': 'timestamp',
                     'col_description': 'description of etl_created_at',
                     'col_sort_order': 4}, table),
                self._union(
                    {'col_name': 'ds',
                     'col_type': 'varchar',
                     'col_description': None,
                     'col_sort_order': 5}, table),
                self._union(
                    {'col_name': 'col_name',
                     'col_type': 'varchar',
                     'col_description': 'description of col_name',
                     'col_sort_order': 0}, table1),
                self._union(
                    {'col_name': 'col_name2',
                     'col_type': 'varchar',
                     'col_description': 'description of col_name2',
                     'col_sort_order': 1}, table1),
                self._union(
                    {'col_name': 'col_id3',
                     'col_type': 'varchar',
                     'col_description': 'description of col_id3',
                     'col_sort_order': 0}, table2),
                self._union(
                    {'col_name': 'col_name3',
                     'col_type': 'varchar',
                     'col_description': 'description of col_name3',
                     'col_sort_order': 1}, table2)
            ]

            extractor = MSSQLMetadataExtractor()
            extractor.init(self.conf)

            expected = TableMetadata('mssql',
                                     self.conf['extractor.mssql_metadata.{}'.format(
                                         MSSQLMetadataExtractor.CLUSTER_KEY)],
                                     'test_schema1', 'test_table1', 'test table 1',
                                     [ColumnMetadata('col_id1', 'description of col_id1', 'bigint', 0),
                                      ColumnMetadata('col_id2', 'description of col_id2', 'bigint', 1),
                                      ColumnMetadata('is_active', None, 'boolean', 2),
                                      ColumnMetadata('source', 'description of source', 'varchar', 3),
                                      ColumnMetadata('etl_created_at', 'description of etl_created_at', 'timestamp', 4),
                                      ColumnMetadata('ds', None, 'varchar', 5)], tags='test_schema1')
            self.assertEqual(expected.__repr__(), extractor.extract().__repr__())

            expected = TableMetadata('mssql',
                                     self.conf['extractor.mssql_metadata.{}'.format(
                                         MSSQLMetadataExtractor.CLUSTER_KEY)],
                                     'test_schema1', 'test_table2', 'test table 2',
                                     [ColumnMetadata('col_name', 'description of col_name', 'varchar', 0),
                                      ColumnMetadata('col_name2', 'description of col_name2', 'varchar', 1)],
                                     tags='test_schema1')
            self.assertEqual(expected.__repr__(), extractor.extract().__repr__())

            expected = TableMetadata('mssql',
                                     self.conf['extractor.mssql_metadata.{}'.format(
                                         MSSQLMetadataExtractor.CLUSTER_KEY)],
                                     'test_schema2', 'test_table3', 'test table 3',
                                     [ColumnMetadata('col_id3', 'description of col_id3', 'varchar', 0),
                                      ColumnMetadata('col_name3', 'description of col_name3',
                                                     'varchar', 1)], tags='test_schema2')
            self.assertEqual(expected.__repr__(), extractor.extract().__repr__())

            self.assertIsNone(extractor.extract())
            self.assertIsNone(extractor.extract())
Example #50
0
    def test_prepare_components(self):
        mocked_argparse = MagicMock()
        mocked_parser = MagicMock()
        mocked_args = MagicMock()
        mocked_queue = MagicMock()
        mocked_router = MagicMock()
        mocked_routes = MagicMock()
        mocked_merger = MagicMock()
        mocked_fetcher = MagicMock()
        mocked_fuse = MagicMock()
        mocked_merge_worker = MagicMock()
        mocked_fetch_worker = MagicMock()

        args = EmptyObject(
            **{
                "remote_url": "remote_url",
                "mount_point": "mount_point",
                "username": "******",
                "current_path": "current",
                "history_path": "history",
                "password": "",
                "ssh_key": "/home/user/.ssh/id_rsa",
                "ssh_user": "******",
                "foreground": True,
                "allow_root": True,
                "allow_others": True,
                "repo_path": "repo_path",
                "branch": "branch",
                "user": "******",
                "group": "group",
                "max_size": "max_size",
                "max_offset": "max_offset",
                "upstream": "origin",
                "fetch_timeout": 10,
                "merge_timeout": 10,
                "commiter_name": "commit",
                "commiter_email": "*****@*****.**",
                "log": "syslog",
                "ignore_file": "",
                "module_file": "",
                "hard_ignore": None,
                "min_idle_times": 1,
                "idle_fetch_timeout": 10,
            })

        mocked_argparse.ArgumentParser.return_value = mocked_parser
        mocked_args.return_value = args
        mocked_merger.return_value = mocked_merge_worker
        mocked_fetcher.return_value = mocked_fetch_worker
        mocked_router.repo = "repo"
        mocked_router.repo_path = "repo_path"

        with patch.multiple(
                "gitfs.mounter",
                CommitQueue=MagicMock(return_value=mocked_queue),
                Router=MagicMock(return_value=mocked_router),
                prepare_routes=mocked_routes,
                SyncWorker=mocked_merger,
                FetchWorker=mocked_fetcher,
                FUSE=mocked_fuse,
                get_credentials=MagicMock(return_value="cred"),
        ):

            assert_result = (mocked_merge_worker, mocked_fetch_worker,
                             mocked_router)

            assert prepare_components(args) == assert_result
            mocked_fetcher.assert_called_once_with(
                upstream="origin",
                branch="branch",
                repository="repo",
                timeout=10,
                idle_timeout=10,
                credentials="cred",
            )

            asserted_call = {
                "repository": "repo",
                "upstream": "origin",
                "branch": "branch",
                "timeout": 10,
                "repo_path": "repo_path",
                "commit_queue": mocked_queue,
                "credentials": "cred",
                "min_idle_times": 1,
            }
            mocked_merger.assert_called_once_with("commit",
                                                  "*****@*****.**",
                                                  "commit",
                                                  "*****@*****.**",
                                                  **asserted_call)
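Example #51
0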
def _convert_step(name, supported_types, response=None):
    step = MagicMock(name=name)
    step.get_supported_types.return_value = supported_types
    if response:
        step.return_value = response
    return step
Example #52
0
import pytest
import json
from unittest import mock
from mock import patch, Mock, MagicMock
import unittest
import tweepy
from tweepy import Cursor

from src.config import API_key, API_secret_key, Access_token, Secret_access_token
from src.main import TwitterAuth, TwitterClient, TwitterStreamer

subject = TwitterClient()

### Mock API
api_mock = MagicMock(spec=tweepy.API)
api_mock.return_value = api_mock


@patch('tweepy.API', api_mock)
def test_get_twitter_client_API():
    response = subject.get_twitter_client_api()
    assert response != None


def get_mock_cursor():
    with open('./src/example_tweet_object.json') as f:
        for line in f:
            tweet = json.loads(line)
            mock_status = MagicMock()
            mock_status.__iter__.json.return_value = tweet
            mock_cursor = MagicMock()
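Example #53
0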
def mocked_notify_dropbox(dropbox):
    from mock import MagicMock
    mocked_notify = MagicMock()
    mocked_notify.return_value = 1
    dropbox._notify_editors = mocked_notify
    return dropbox
Example #54
0
def mock_xml_is_valid_check(part):
    mock_xml_layout_value = MagicMock(name="mock_xml_layout_value")
    mock_xml_layout_value.return_value = True
    part._check_xml_is_valid = mock_xml_layout_value
Example #55
0

_input = MagicMock(name='_input')

_expanduser = MagicMock(name='expanduser')

_sleep = MagicMock(name='sleep')

_call = MagicMock(name='call')

_Popen = MagicMock(name='Popen')

_open = MagicMock(name='open')

_getuid = MagicMock(name='getuid')
_getuid.return_value = 1

# _request is referenced by the @patch decorator below
_request = MagicMock(name='request')


@patch('pontoon.pontoon.Pontoon.request', _request)
class TestRender:

    client = Pontoon('foo', 'bar')

    def test_render(self):
        with raises(ClientException):
            self.client.render('foo', "/foo/%s/bar" % Struct)

        with raises(ClientException):
            self.client.render('foo', '/foo/100/bar')

Example #56
0
    def test_update_post_content(self, authorized, validation_error):
        datarequest_id = 'this-represents-an-uuidv4()'

        original_dr = {
            'id': datarequest_id,
            'title': 'A completly different title',
            'description': 'Other description'
        }

        # Set up the get_action function
        show_datarequest = MagicMock(return_value=original_dr)
        update_datarequest = MagicMock()

        def _get_action(action):
            if action == constants.SHOW_DATAREQUEST:
                return show_datarequest
            else:
                return update_datarequest

        controller.tk.get_action.side_effect = _get_action

        # Raise exception if the user is not authorized to create a new data request
        if not authorized:
            controller.tk.check_access.side_effect = controller.tk.NotAuthorized(
                'User not authorized')

        # Raise exception when the user input is not valid
        if validation_error:
            update_datarequest.side_effect = controller.tk.ValidationError({
                'Title': ['error1', 'error2'],
                'Description': ['error3, error4']
            })
        else:
            update_datarequest.return_value = {'id': datarequest_id}

        # Create the request
        request_data = controller.request.POST = {
            'id': datarequest_id,
            'title': 'Example Title',
            'description': 'Example Description',
            'organization_id': 'organization uuid4'
        }
        result = self.controller_instance.update(datarequest_id)

        # Authorize function has been called
        controller.tk.check_access.assert_called_once_with(
            constants.UPDATE_DATAREQUEST, self.expected_context,
            {'id': datarequest_id})

        if authorized:
            self.assertEquals(0, controller.tk.abort.call_count)
            self.assertEquals(controller.tk.render.return_value, result)
            controller.tk.render.assert_called_once_with(
                'datarequests/edit.html')

            show_datarequest.assert_called_once_with(self.expected_context,
                                                     {'id': datarequest_id})
            update_datarequest.assert_called_once_with(self.expected_context,
                                                       request_data)

            if validation_error:
                errors_summary = {}
                for key, error in update_datarequest.side_effect.error_dict.items(
                ):
                    errors_summary[key] = ', '.join(error)

                self.assertEquals(update_datarequest.side_effect.error_dict,
                                  controller.c.errors)
                expected_request_data = request_data.copy()
                expected_request_data['id'] = datarequest_id
                self.assertEquals(expected_request_data,
                                  controller.c.datarequest)
                self.assertEquals(errors_summary, controller.c.errors_summary)
                self.assertEquals(original_dr['title'],
                                  controller.c.original_title)
            else:
                self.assertEquals({}, controller.c.errors)
                self.assertEquals({}, controller.c.errors_summary)
                self.assertEquals(original_dr, controller.c.datarequest)
                controller.helpers.url_for.assert_called_once_with(
                    controller=
                    'ckanext.datarequests.controllers.ui_controller:DataRequestsUI',
                    action='show',
                    id=datarequest_id)
                controller.tk.redirect_to.assert_called_once_with(
                    controller.helpers.url_for.return_value)
        else:
            controller.tk.abort.assert_called_once_with(
                403, 'You are not authorized to update the Data Request %s' %
                datarequest_id)
            self.assertEquals(0, controller.tk.render.call_count)
Example #57
0
"""

# imports
from __future__ import absolute_import
import pytest
from mock import MagicMock

# DIRAC Components
from DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent import PilotStatusAgent
from DIRAC import gLogger

# Mock objects
mockReply = MagicMock()
mockAM = MagicMock()
mockNone = MagicMock()
mockNone.return_value = None
mockOK = MagicMock()
mockOK.return_value = {'OK': False}

gLogger.setLevel('DEBUG')


def test_clearWaitingPilots(mocker):
  """ Testing PilotStatusAgent().clearWaitingPilots()
  """

  mocker.patch("DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent.AgentModule.__init__")
  mocker.patch("DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent.AgentModule.am_getOption", side_effect=mockAM)
  mocker.patch("DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent.PilotAgentsDB.__init__", side_effect=mockNone)
  mocker.patch("DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent.JobDB.__init__", side_effect=mockNone)
  module_str = "DIRAC.WorkloadManagementSystem.Agent.PilotStatusAgent.PilotAgentsDB.buildCondition"
Example #58
0
    def test_extraction_with_single_result(self) -> None:
        with patch.object(SQLAlchemyExtractor,
                          '_get_connection') as mock_connection:
            connection = MagicMock()
            mock_connection.return_value = connection
            sql_execute = MagicMock()
            connection.execute = sql_execute
            table = {
                'schema': 'test_schema',
                'name': 'test_table',
                'description': 'a table for testing',
                'td_cluster': self.conf[TeradataMetadataExtractor.CLUSTER_KEY]
            }

            sql_execute.return_value = [
                self._union(
                    {
                        'col_name': 'col_id1',
                        'col_type': 'bigint',
                        'col_description': 'description of id1',
                        'col_sort_order': 0
                    }, table),
                self._union(
                    {
                        'col_name': 'col_id2',
                        'col_type': 'bigint',
                        'col_description': 'description of id2',
                        'col_sort_order': 1
                    }, table),
                self._union(
                    {
                        'col_name': 'is_active',
                        'col_type': 'boolean',
                        'col_description': None,
                        'col_sort_order': 2
                    }, table),
                self._union(
                    {
                        'col_name': 'source',
                        'col_type': 'varchar',
                        'col_description': 'description of source',
                        'col_sort_order': 3
                    }, table),
                self._union(
                    {
                        'col_name': 'etl_created_at',
                        'col_type': 'timestamp',
                        'col_description': 'description of etl_created_at',
                        'col_sort_order': 4
                    }, table),
                self._union(
                    {
                        'col_name': 'ds',
                        'col_type': 'varchar',
                        'col_description': None,
                        'col_sort_order': 5
                    }, table)
            ]

            extractor = TeradataMetadataExtractor()
            extractor.init(self.conf)
            actual = extractor.extract()
            expected = TableMetadata(
                'teradata', 'MY_CLUSTER', 'test_schema', 'test_table',
                'a table for testing', [
                    ColumnMetadata('col_id1', 'description of id1', 'bigint',
                                   0),
                    ColumnMetadata('col_id2', 'description of id2', 'bigint',
                                   1),
                    ColumnMetadata('is_active', None, 'boolean', 2),
                    ColumnMetadata('source', 'description of source',
                                   'varchar', 3),
                    ColumnMetadata('etl_created_at',
                                   'description of etl_created_at',
                                   'timestamp', 4),
                    ColumnMetadata('ds', None, 'varchar', 5)
                ])

            self.assertEqual(expected.__repr__(), actual.__repr__())
            self.assertIsNone(extractor.extract())
Example #59
0
def test_should_return_false_on_compare_fails(mock_checkpw: MagicMock,
                                              sut: BcryptAdapter):
    mock_checkpw.return_value = False
    result = sut.compare("any_value", "any_hash")
    assert not result
Example #60
0
# Imports assumed for this snippet; the glue module paths are best guesses
import numpy as np
from collections import OrderedDict

from mock import MagicMock

from glue import custom_viewer
from glue.core.subset import SubsetState

viewer = custom_viewer('Testing Custom Viewer',
                       a=(0, 100),
                       b='att',
                       c='att(x)',
                       d=True,
                       e=False,
                       f=['a', 'b', 'c'],
                       g=OrderedDict(a=1, b=2, c=3),
                       h=64)

setup = MagicMock()
settings_changed = MagicMock()
plot_subset = MagicMock()
plot_data = MagicMock()
make_selector = MagicMock()
make_selector.return_value = MagicMock(spec=SubsetState)
make_selector().copy.return_value = MagicMock(spec=SubsetState)
make_selector().copy().to_mask.return_value = np.array([False, True, True])


@viewer.setup
def _setup(axes):
    setup(axes)


@viewer.plot_data
def _plot_data(axes, a, b, g, h):
    plot_data(axes=axes, a=a, b=b, g=g, h=h)
    return []