diff --git a/tests/attr/test_approximation_methods.py b/tests/attr/test_approximation_methods.py
index b2ced6ecf..6b0dcb69e 100644
--- a/tests/attr/test_approximation_methods.py
+++ b/tests/attr/test_approximation_methods.py
@@ -15,9 +15,10 @@ def __init__(self, methodName: str = "runTest") -> None:
         super().__init__(methodName)
 
     def test_riemann_0(self) -> None:
+        step_sizes, alphas = riemann_builders()
         with self.assertRaises(AssertionError):
-            step_sizes, alphas = riemann_builders()
             step_sizes(0)
+        with self.assertRaises(AssertionError):
             alphas(0)
 
     def test_riemann_2(self) -> None:
diff --git a/tests/attr/test_llm_attr.py b/tests/attr/test_llm_attr.py
index bbdeced56..96316892e 100644
--- a/tests/attr/test_llm_attr.py
+++ b/tests/attr/test_llm_attr.py
@@ -904,16 +904,16 @@ def test_get_logprobs_successful(self) -> None:
 
     def test_get_logprobs_missing_tokenizer(self) -> None:
         """Test get_logprobs with missing tokenizer."""
+        provider = VLLMProvider(api_url=self.api_url, model_name=self.model_name)
         with self.assertRaises(ValueError) as context:
-            provider = VLLMProvider(api_url=self.api_url, model_name=self.model_name)
             provider.get_logprobs(self.input_prompt, self.target_str, None)
 
         self.assertIn("Tokenizer is required", str(context.exception))
 
     def test_get_logprobs_empty_target(self) -> None:
         """Test get_logprobs with empty target string."""
+        provider = VLLMProvider(api_url=self.api_url, model_name=self.model_name)
         with self.assertRaises(ValueError) as context:
-            provider = VLLMProvider(api_url=self.api_url, model_name=self.model_name)
             provider.get_logprobs(self.input_prompt, "", self.tokenizer)
 
         self.assertIn("Target string cannot be empty", str(context.exception))
diff --git a/tests/attr/test_targets.py b/tests/attr/test_targets.py
index 7e0f6ca21..57175e6dc 100644
--- a/tests/attr/test_targets.py
+++ b/tests/attr/test_targets.py
@@ -220,15 +220,15 @@ class TestTargets(BaseTest, metaclass=TargetsMeta):
     def test_simple_target_missing_error(self) -> None:
         net = BasicModel_MultiLayer()
         inp = torch.zeros((1, 3))
+        attr = IntegratedGradients(net)
         with self.assertRaises(AssertionError):
-            attr = IntegratedGradients(net)
             attr.attribute(inp)  # type: ignore[has-type]
 
     def test_multi_target_error(self) -> None:
         net = BasicModel_MultiLayer()
         inp = torch.zeros((1, 3))
+        attr = IntegratedGradients(net)
         with self.assertRaises(AssertionError):
-            attr = IntegratedGradients(net)
             attr.attribute(  # type: ignore[has-type]
                 inp, additional_forward_args=(None, True), target=(1, 0)
             )
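
The pattern applied in all three files above is the same: construct the object under test before entering assertRaises, so the context manager asserts only on the call that is expected to fail, and an unexpected constructor error cannot satisfy the assertion by accident. A minimal, self-contained sketch of that pattern follows; DummyValidator and its names are illustrative stand-ins, not part of this patch.

import unittest


class DummyValidator:
    """Illustrative stand-in for an object whose method validates its input."""

    def check(self, value: int) -> int:
        if value <= 0:
            raise ValueError("value must be positive")
        return value


class DummyValidatorTest(unittest.TestCase):
    def test_rejects_non_positive(self) -> None:
        # Construction happens outside the context manager; only check(0)
        # is allowed to raise the expected ValueError.
        validator = DummyValidator()
        with self.assertRaises(ValueError):
            validator.check(0)


if __name__ == "__main__":
    unittest.main()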