
Commit

Update aquila.py
BAAI-OpenPlatform authored Aug 2, 2023
1 parent 7aeb339 commit d3722a6
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions flagai/model/predictor/aquila.py
@@ -29,8 +29,8 @@ def aquila_generate(
 
     total_len = min(2048, max_gen_len + max_prompt_size)
 
-    # tokens = torch.full((bsz, total_len), 0).cuda().long()
-    tokens = torch.full((bsz, total_len), 0).to("cuda:5").long()
+    tokens = torch.full((bsz, total_len), 0).cuda().long()
+    #tokens = torch.full((bsz, total_len), 0).to("cuda:5").long()
     for k, t in enumerate(prompt_tokens):
         tokens[k, : len(t)] = t.clone().detach().long()
     input_text_mask = tokens != 0
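The commit restores the default `.cuda()` placement for the token buffer, reverting a line that pinned it to the specific device `cuda:5`. A minimal sketch of a more flexible variant, making the device an explicit parameter rather than hardcoding it (the `make_token_buffer` helper and its `device` argument are hypothetical illustrations, not part of FlagAI's API):

```python
import torch

def make_token_buffer(bsz: int, total_len: int, device: str = "cuda") -> torch.Tensor:
    """Hypothetical helper: allocate the generation token buffer on a configurable device.

    Passing the device explicitly (e.g. "cuda", "cuda:5", or "cpu") avoids the
    hardcoded .to("cuda:5") call that this commit reverts.
    """
    # Zero-filled long tensor of shape (batch, total_len), matching the original buffer.
    return torch.full((bsz, total_len), 0, dtype=torch.long, device=device)
```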
