Skip to content

Commit

Permalink
Add opcheck for psroialign -- currently fails
Browse files Browse the repository at this point in the history
  • Loading branch information
NicolasHug committed Oct 21, 2023
1 parent 68161e9 commit 440f7c4
Showing 1 changed file with 13 additions and 1 deletion.
14 changes: 13 additions & 1 deletion test/test_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,9 @@ class RoIOpTester(ABC):
torch.float32,
torch.float64,
),
ids=str,
# ids=str,
)
@pytest.mark.opcheck_only_one()
def test_forward(self, device, contiguous, x_dtype, rois_dtype=None, deterministic=False, **kwargs):
if device == "mps" and x_dtype is torch.float64:
pytest.skip("MPS does not support float64")
Expand Down Expand Up @@ -186,6 +187,7 @@ def test_torch_fx_trace(self, device, x_dtype=torch.float, rois_dtype=torch.floa
@pytest.mark.parametrize("seed", range(10))
@pytest.mark.parametrize("device", cpu_and_cuda_and_mps())
@pytest.mark.parametrize("contiguous", (True, False))
@pytest.mark.opcheck_only_one()
def test_backward(self, seed, device, contiguous, deterministic=False):
atol = self.mps_backward_atol if device == "mps" else 1e-05
dtype = self.mps_dtype if device == "mps" else self.dtype
Expand Down Expand Up @@ -228,6 +230,7 @@ def func(z):
@needs_cuda
@pytest.mark.parametrize("x_dtype", (torch.float, torch.half))
@pytest.mark.parametrize("rois_dtype", (torch.float, torch.half))
@pytest.mark.opcheck_only_one()
def test_autocast(self, x_dtype, rois_dtype):
with torch.cuda.amp.autocast():
self.test_forward(torch.device("cuda"), contiguous=False, x_dtype=x_dtype, rois_dtype=rois_dtype)
Expand Down Expand Up @@ -646,6 +649,15 @@ def test_boxes_shape(self):
self._helper_boxes_shape(ops.ps_roi_align)


# Dynamically attach torch.library opcheck tests (schema, fake-tensor, and
# autograd-registration checks) for every "torchvision::" custom op exercised
# by the TestPSRoIAlign test case.  Tests that are expected to fail are
# tracked in optests_failures_dict.json next to this file.
optests.generate_opcheck_tests(
    testcase=TestPSRoIAlign,
    namespaces=["torchvision"],
    failures_dict_path=os.path.join(
        os.path.dirname(__file__), "optests_failures_dict.json"
    ),
    additional_decorators=[],
    test_utils=OPTESTS,
)


class TestMultiScaleRoIAlign:
def make_obj(self, fmap_names=None, output_size=(7, 7), sampling_ratio=2, wrap=False):
if fmap_names is None:
Expand Down

0 comments on commit 440f7c4

Please sign in to comment.