
Add missing iree_hal_target_device flags
archana-ramalingam committed Jan 6, 2025
1 parent afc8cff commit 69997e6
Showing 1 changed file with 5 additions and 0 deletions.
5 changes: 5 additions & 0 deletions sharktank/tests/models/llama/benchmark_amdgpu_test.py
@@ -339,6 +339,7 @@ def setUp(self):
irpa_path=str(self.irpa_path),
batch_size=4,
iree_hip_target="gfx942",
+ iree_hal_target_device="hip",
attention_kernel="torch",
tensor_parallelism_size=self.tensor_parallelism_size,
block_seq_stride=32,
@@ -347,6 +348,7 @@ def setUp(self):
irpa_path=str(self.irpa_path_fp8),
batch_size=4,
iree_hip_target="gfx942",
+ iree_hal_target_device="hip",
attention_kernel="torch",
tensor_parallelism_size=self.tensor_parallelism_size,
block_seq_stride=32,
@@ -355,6 +357,7 @@ def setUp(self):
irpa_path=str(self.irpa_path_fp8),
batch_size=4,
iree_hip_target="gfx942",
+ iree_hal_target_device="hip",
attention_kernel="torch",
tensor_parallelism_size=self.tensor_parallelism_size,
block_seq_stride=32,
@@ -664,6 +667,7 @@ def setUp(self):
irpa_path=str(self.irpa_path),
batch_size=4,
iree_hip_target="gfx942",
+ iree_hal_target_device="hip",
attention_kernel="torch",
tensor_parallelism_size=self.tensor_parallelism_size,
block_seq_stride=32,
@@ -672,6 +676,7 @@ def setUp(self):
irpa_path=str(self.irpa_path_fp8),
batch_size=4,
iree_hip_target="gfx942",
+ iree_hal_target_device="hip",
attention_kernel="torch",
tensor_parallelism_size=self.tensor_parallelism_size,
block_seq_stride=32,
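For context, a minimal sketch of how keyword arguments like iree_hip_target and iree_hal_target_device are typically forwarded to iree-compile. This is an illustrative assumption, not the repository's actual export helper; compile_for_hip and its signature are hypothetical.

import subprocess

def compile_for_hip(
    mlir_path: str,
    vmfb_path: str,
    iree_hip_target: str = "gfx942",
    iree_hal_target_device: str = "hip",
) -> None:
    # Hypothetical helper: forwards the two kwargs from this diff to iree-compile.
    # --iree-hal-target-device selects the HAL device ("hip" here);
    # --iree-hip-target selects the GPU architecture ("gfx942" is MI300-class).
    subprocess.run(
        [
            "iree-compile",
            mlir_path,
            f"--iree-hal-target-device={iree_hal_target_device}",
            f"--iree-hip-target={iree_hip_target}",
            "-o",
            vmfb_path,
        ],
        check=True,
    )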
