diff --git a/scripts/bert/index.rst b/scripts/bert/index.rst
index 5834565915..3b0fe88cc3 100644
--- a/scripts/bert/index.rst
+++ b/scripts/bert/index.rst
@@ -104,13 +104,13 @@ GluonNLP also supports the "`DistilBERT `_" mo
 +-----------------------------------------+----------------------+
 |                                         | distilbert_6_768_12  |
 +=========================================+======================+
-| distil_book_corpus_wiki_en_uncased      | ✓                    |
+| distilbert_book_corpus_wiki_en_uncased  | ✓                    |
 +-----------------------------------------+----------------------+
 
 .. code-block:: python
 
     import gluonnlp as nlp; import mxnet as mx;
-    model, vocab = nlp.model.get_model('distilbert_6_768_12', dataset_name='distil_book_corpus_wiki_en_uncased');
+    model, vocab = nlp.model.get_model('distilbert_6_768_12', dataset_name='distilbert_book_corpus_wiki_en_uncased');
     tokenizer = nlp.data.BERTTokenizer(vocab, lower=True);
     transform = nlp.data.BERTSentenceTransform(tokenizer, max_seq_length=512, pair=False, pad=False);
     sample = transform(['Hello world!']);
diff --git a/src/gluonnlp/model/bert.py b/src/gluonnlp/model/bert.py
index 3989c868b2..c07779df6a 100644
--- a/src/gluonnlp/model/bert.py
+++ b/src/gluonnlp/model/bert.py
@@ -1254,7 +1254,7 @@ def bert_24_1024_16(dataset_name=None, vocab=None, pretrained=True, ctx=mx.cpu()
                           pretrained_allow_missing=pretrained_allow_missing,
                           hparam_allow_override=hparam_allow_override, **kwargs)
 
 
-def distilbert_6_768_12(dataset_name='distil_book_corpus_wiki_en_uncased', vocab=None,
+def distilbert_6_768_12(dataset_name='distilbert_book_corpus_wiki_en_uncased', vocab=None,
                         pretrained=True, ctx=mx.cpu(), output_attention=False,
                         output_all_encodings=False,