Skip to content

Fix total_forwards calculation in ablation/permutation for cross-tensor attribution #1540

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions captum/attr/_core/feature_ablation.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,10 @@
from captum._utils.progress import progress, SimpleProgress
from captum._utils.typing import BaselineType, TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._utils.attribution import PerturbationAttribution
from captum.attr._utils.common import _format_input_baseline
from captum.attr._utils.common import (
_format_input_baseline,
get_total_features_from_mask,
)
from captum.log import log_usage
from torch import dtype, Tensor
from torch.futures import collect_all, Future
Expand Down Expand Up @@ -894,7 +897,9 @@ def _attribute_progress_setup(
formatted_inputs, feature_mask, **kwargs
)
total_forwards = (
math.ceil(int(sum(feature_counts)) / perturbations_per_eval)
math.ceil(
get_total_features_from_mask(feature_mask) / perturbations_per_eval
)
if enable_cross_tensor_attribution
else sum(
math.ceil(count / perturbations_per_eval) for count in feature_counts
Expand Down
13 changes: 13 additions & 0 deletions captum/attr/_utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -390,3 +390,16 @@ def _construct_default_feature_mask(
total_features = current_num_features
feature_mask = tuple(feature_mask)
return feature_mask, total_features


def get_total_features_from_mask(
    feature_mask: Tuple[Tensor, ...],
) -> int:
    """
    Return the number of input features, i.e. the count of distinct
    feature IDs/indices that appear anywhere across the tensors of
    ``feature_mask``.

    Args:
        feature_mask: Tuple of mask tensors; each element labels positions
            of the corresponding input tensor with integer feature IDs.
            IDs may be shared across tensors and are counted once.

    Returns:
        Total number of unique feature IDs over all mask tensors.
    """
    unique_ids = {
        idx for mask in feature_mask for idx in torch.unique(mask).tolist()
    }
    return len(unique_ids)
11 changes: 11 additions & 0 deletions tests/utils/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
parse_version,
safe_div,
)
from captum.attr._utils.common import get_total_features_from_mask
from captum.testing.helpers.basic import (
assertTensorAlmostEqual,
assertTensorTuplesAlmostEqual,
Expand Down Expand Up @@ -174,6 +175,16 @@ def test_get_max_feature_index(self) -> None:

assert _get_max_feature_index(mask) == 100

def test_mask_unique_elem(self) -> None:
    # Feature IDs must be deduplicated across all tensors of the mask:
    # repeats within one tensor and across tensors count once each.
    cases = (
        ((torch.tensor([0, 0, 0]),), 1),
        ((torch.tensor([0, 0, 4]),), 2),
        ((torch.tensor([0, 0, 4]), torch.tensor([0, 4, 5])), 3),
    )
    for masks, expected in cases:
        self.assertEqual(get_total_features_from_mask(masks), expected)


class TestParseVersion(BaseTest):
def test_parse_version_dev(self) -> None:
Expand Down