Add config for transformer encoder as backbone of time series embedder
nathanpainchaud committed Oct 24, 2023
1 parent 741610f commit 8acd54f
Showing 1 changed file with 42 additions and 0 deletions.
@@ -0,0 +1,42 @@
_target_: torch.nn.Sequential
_args_:
  - _target_: collections.OrderedDict

    # Add a trailing feature dimension of size 1 to each sample of the raw time series
    expand_feat_dim:
      _target_: vital.models.layers.Lambda
      fn:
        _target_: torch.unsqueeze
        _partial_: True
        dim: -1

    # Project each scalar sample to an embed_dim-wide token
    upsampling:
      _target_: torch.nn.Linear
      in_features: 1
      out_features: ${task.embed_dim}

    # Add positional information to the sequence of tokens
    positional_encoding:
      _target_: didactic.models.layers.PositionalEncoding
      sequence_len: ${task.img_tokenizer.resample_dim}
      d_model: ${task.embed_dim}

    transformer_encoder:
      _target_: torch.nn.TransformerEncoder
      num_layers: 2

      norm:
        _target_: torch.nn.LayerNorm
        normalized_shape: ${task.embed_dim}

      encoder_layer:
        _target_: torch.nn.TransformerEncoderLayer
        d_model: ${task.embed_dim}
        nhead: 2
        dim_feedforward: ${op.mul:1.5,${task.embed_dim},int}
        dropout: 0.1
        activation: relu
        batch_first: True
        norm_first: True

    # Pool the token sequence into a single embed_dim vector per time series
    sequential_pooling:
      _target_: didactic.models.layers.SequentialPooling
      d_model: ${task.embed_dim}
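
Instantiated recursively by Hydra, this config builds a torch.nn.Sequential over an OrderedDict of named stages: expand a feature dimension, project each sample to the token width, add positional encodings, run a 2-layer Transformer encoder, and pool the sequence into one embedding. Below is a minimal Python sketch of the resulting module stack. The Lambda, PositionalEncoding and SequentialPooling classes here are simplified stand-ins for the vital/didactic layers (their behaviour is inferred from their names and arguments, not from the source), and embed_dim=8 / resample_dim=128 are placeholder values for the ${task.*} interpolations.

# Sketch of the module stack this config would have Hydra build,
# with plain-PyTorch stand-ins for the project-specific layers.
from collections import OrderedDict
from functools import partial

import torch
from torch import nn

embed_dim, resample_dim = 8, 128  # assumed values of the Hydra interpolations


class Lambda(nn.Module):
    """Stand-in for vital.models.layers.Lambda: wraps a callable as a module."""

    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x):
        return self.fn(x)


class PositionalEncoding(nn.Module):
    """Stand-in: learnable positional embeddings added to the token sequence."""

    def __init__(self, sequence_len, d_model):
        super().__init__()
        self.pos = nn.Parameter(torch.zeros(1, sequence_len, d_model))

    def forward(self, x):  # x: (N, sequence_len, d_model)
        return x + self.pos


class SequentialPooling(nn.Module):
    """Stand-in: pools the token sequence into a single d_model vector
    (here via attention weights; the real layer may pool differently)."""

    def __init__(self, d_model):
        super().__init__()
        self.attn = nn.Linear(d_model, 1)

    def forward(self, x):  # x: (N, L, d_model) -> (N, d_model)
        w = self.attn(x).softmax(dim=1)
        return (w * x).sum(dim=1)


embedder = nn.Sequential(OrderedDict(
    # (N, L) -> (N, L, 1): give each scalar sample a feature dimension
    expand_feat_dim=Lambda(partial(torch.unsqueeze, dim=-1)),
    # (N, L, 1) -> (N, L, embed_dim): project each sample to the token width
    upsampling=nn.Linear(1, embed_dim),
    positional_encoding=PositionalEncoding(resample_dim, embed_dim),
    transformer_encoder=nn.TransformerEncoder(
        nn.TransformerEncoderLayer(
            d_model=embed_dim,
            nhead=2,
            dim_feedforward=int(1.5 * embed_dim),  # mirrors the op.mul resolver
            dropout=0.1,
            activation="relu",
            batch_first=True,
            norm_first=True,
        ),
        num_layers=2,
        norm=nn.LayerNorm(embed_dim),
    ),
    # (N, L, embed_dim) -> (N, embed_dim): one embedding per time series
    sequential_pooling=SequentialPooling(embed_dim),
))

out = embedder(torch.randn(4, resample_dim))
print(out.shape)  # torch.Size([4, 8])

Under these assumptions, a batch of raw time series of shape (N, 128) comes out as one (N, 8) embedding per series; the config only decides which classes fill each named slot, so tuning embed_dim, nhead or num_layers happens entirely in YAML.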
