From 682509744e0e2af76cc810bce4d83556a02bc5bc Mon Sep 17 00:00:00 2001
From: karinazad
Date: Tue, 28 Jan 2025 17:20:23 -0500
Subject: [PATCH] remove dict

---
 src/lobster/tokenization/_smiles_tokenizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lobster/tokenization/_smiles_tokenizer.py b/src/lobster/tokenization/_smiles_tokenizer.py
index f59c2c4..0700ef6 100644
--- a/src/lobster/tokenization/_smiles_tokenizer.py
+++ b/src/lobster/tokenization/_smiles_tokenizer.py
@@ -29,7 +29,7 @@ def _make_smiles_tokenizer(
     """Create PreTrainedTokenizerFast for SMILES Regex tokenization."""
 
     vocab = load_vocab_file(VOCAB_PATH if vocab_file is None else vocab_file)
-    vocab = {v: k for k, v in dict(enumerate(vocab)).items()}
+    vocab = {v: k for k, v in enumerate(vocab)}
 
     tok = Tokenizer(BPE(vocab, merges=[], unk_token="<unk>", ignore_merges=True))
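
Note (not part of the patch): a minimal standalone sketch of why this change is behavior-preserving. enumerate() already yields (index, token) pairs, so wrapping it in dict(...).items() only round-trips through an intermediate dict before the comprehension inverts the pairs into a token-to-id mapping. The vocab list below is hypothetical, for illustration only.

    # enumerate(vocab) yields (index, token) pairs directly, so
    # dict(enumerate(vocab)).items() is a redundant round-trip.
    vocab = ["<unk>", "C", "O", "N"]  # hypothetical token list

    before = {v: k for k, v in dict(enumerate(vocab)).items()}
    after = {v: k for k, v in enumerate(vocab)}

    assert before == after == {"<unk>": 0, "C": 1, "O": 2, "N": 3}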