Skip to content

Commit

Permalink
Remove redundant dict() wrapper when building the token-to-index vocab mapping
Browse files Browse the repository at this point in the history
  • Loading branch information
karinazad committed Jan 28, 2025
1 parent fac1e41 commit 6825097
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion src/lobster/tokenization/_smiles_tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def _make_smiles_tokenizer(
"""Create PreTrainedTokenizerFast for SMILES Regex tokenization."""

vocab = load_vocab_file(VOCAB_PATH if vocab_file is None else vocab_file)
vocab = {v: k for k, v in dict(enumerate(vocab)).items()}
vocab = {v: k for k, v in enumerate(vocab)}

tok = Tokenizer(BPE(vocab, merges=[], unk_token="<unk>", ignore_merges=True))

Expand Down

0 comments on commit 6825097

Please sign in to comment.