Skip to content

Commit

Permalink
[Fix] Fix C++ binding for fill_next_token_bitmask (#93)
Browse files Browse the repository at this point in the history
This PR fixes a bug in `fill_next_token_bitmask` related to the C++ binding: the matcher now validates that the bitmask tensor is on the CPU with the expected dtype, and passes the raw data pointer and shape to the C++ handle instead of the tensor object itself.
  • Loading branch information
Ubospica authored Nov 24, 2024
1 parent 5dced7e commit 7b31407
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion python/xgrammar/matcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,11 @@ def fill_next_token_bitmask(self, bitmask: torch.Tensor, index: int = 0) -> None
index : int, default: 0
The batch id of the bitmask.
"""
self._handle.fill_next_token_bitmask(bitmask, index)
if bitmask.device.type != "cpu":
raise ValueError("bitmask should be on CPU.")
if bitmask.dtype != bitmask_dtype:
raise ValueError(f"bitmask should be of type {bitmask_dtype}.")
self._handle.fill_next_token_bitmask(bitmask.data_ptr(), list(bitmask.shape), index)

def find_jump_forward_string(self) -> str:
"""Find the jump-forward string for jump-forward decoding. This is the longest string that
Expand Down

0 comments on commit 7b31407

Please sign in to comment.