add config_amazon_mistral_lite (huggingface#1493)
Co-authored-by: Ubuntu <[email protected]>
danielclough authored Dec 28, 2023
1 parent d35f0a1 commit cd889c0
Showing 1 changed file with 18 additions and 0 deletions.

candle-transformers/src/models/mistral.rs
@@ -57,6 +57,24 @@ impl Config {
             use_flash_attn,
         }
     }
+
+    // https://huggingface.co/amazon/MistralLite/blob/main/config.json
+    pub fn config_amazon_mistral_lite(use_flash_attn: bool) -> Self {
+        Self {
+            vocab_size: 32003,
+            hidden_size: 4096,
+            intermediate_size: 14336,
+            num_hidden_layers: 32,
+            num_attention_heads: 32,
+            num_key_value_heads: 8,
+            hidden_act: Activation::Silu,
+            max_position_embeddings: 32768,
+            rms_norm_eps: 1e-5,
+            rope_theta: 10_000.,
+            sliding_window: 4096,
+            use_flash_attn,
+        }
+    }
 }
 
 #[derive(Debug, Clone)]
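
For context, here is a minimal sketch of how this helper could be used to instantiate the model, assuming candle's usual safetensors loading flow. The weights filename is a hypothetical placeholder, and Model::new's exact signature may vary across candle versions.

use candle_core::{DType, Device, Result};
use candle_nn::VarBuilder;
use candle_transformers::models::mistral::{Config, Model};

fn load_mistral_lite() -> Result<Model> {
    let device = Device::Cpu;
    // MistralLite hyperparameters; flash attention disabled in this sketch.
    let config = Config::config_amazon_mistral_lite(false);
    // Memory-map the checkpoint; `unsafe` because the file must stay
    // unmodified while mapped. The filename here is a placeholder.
    let vb = unsafe {
        VarBuilder::from_mmaped_safetensors(&["weights.safetensors"], DType::F32, &device)?
    };
    Model::new(&config, vb)
}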
