# scDINO_full_pipeline.yaml
meta:
  name_of_run: "name_of_run"
  output_dir: "/outputdir"
  dataset_dir: "/datasetdir"
  folder_depth_for_labels: 0 # 0 is the folder where the images are, 1 is the folder above, etc.
  train_datasetsplit_fraction: 0.8
  seed: 40
  images_are_RGB: False
  channel_dict: { 0: "aTub", 1: "BF", 2: "DAPI", 3: "Oct4", 4: "PE" } # do not use underscores or slashes in channel names
  selected_channel_combination_per_run: ["01234", "0", "1", "2", "3", "4"] # "01234" is all 5 channels, "0" is only aTub, "1" is only BF, etc.
  center_crop: 0 # 0 is no center crop
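  # Illustration (not part of the config): with folder_depth_for_labels: 0, the class label of
  # each image is taken from its immediate parent folder. A hypothetical dataset layout:
  #   /datasetdir/
  #     classA/img_001.tiff, img_002.tiff, ...
  #     classB/img_001.tiff, ...
  # Each entry of selected_channel_combination_per_run is a string of channel_dict keys:
  # with the mapping above, "01234" trains on all five channels, "2" on DAPI only, and a
  # string such as "024" (hypothetical) would combine the aTub, DAPI and PE channels.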
train_scDINO:
  dino_vit_name: "name_of_model"
  fraction_for_mean_std_calc: 0.2
  epochs: 10
  saveckp_freq: 2
  num_gpus: 2
  batch_size_per_gpu: 30
  num_workers: 8
  dist_url: "env://"
  hyperparameters:
    arch: "vit_small"
    patch_size: 16
    norm_last_layer: True
    momentum_teacher: 0.996
    use_bn_in_head: False
    warmup_teacher_temp: 0.04
    teacher_temp: 0.04
    warmup_teacher_temp_epochs: 0
    use_fp16: True
    weight_decay: 0.04
    weight_decay_end: 0.4
    clip_grad: 3
    freeze_last_layer: 1
    lr: 0.0005
    warmup_epochs: 5
    min_lr: 0.000001
    optimizer: "adamw"
    drop_path_rate: 0.1
    global_crops_scale: !!python/tuple [0.14, 1.0]
    local_crops_number: 8
    local_crops_scale: !!python/tuple [0.05, 0.14]
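  # Note: the effective batch size is num_gpus * batch_size_per_gpu (2 * 30 = 60 here). In the
  # original DINO recipe the learning rate is scaled linearly with it (lr * total_batch_size / 256),
  # so lr may need adjusting when the batch size changes; check the scDINO training code to
  # confirm it follows the same rule.
  # The !!python/tuple tags above require a Python-aware YAML loader (e.g. PyYAML's
  # unsafe_load); a plain safe_load will reject them.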
downstream_analyses:
  compute_cls_features:
    normalize: True
    num_gpus: 2
    batch_size_per_gpu: 24
    num_workers: 8
    checkpoint_key: teacher
    resize: True
    resize_length: 224 # only used if resize is True
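    # With arch "vit_small" and patch_size 16 above, resizing to 224 px yields a 14 x 14 patch
    # grid, i.e. the standard ViT-S/16 input size; the extracted CLS features feed the kNN and
    # UMAP analyses below.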
  attention_visualisation:
    num_images_per_class: 1
    resize_attention_image: True
    image_size: 480
  kNN:
    global:
      n_neighbors: [5, 10, 50, 100, 200, 500]
      temperature: 0.1
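    # n_neighbors lists the k values evaluated; temperature is presumably the softmax
    # temperature for distance-weighted voting, as in the kNN evaluation of the original
    # DINO paper.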
  umap_eval:
    n_neighbors: 30
    min_dist: 0.4
    n_components: 2
    metric: euclidean
    spread: 1.1
    epochs: 100
    topometry_plots: False
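# A minimal usage sketch (assumptions: the Snakefile name "full_pipeline.smk" and the config
# path are placeholders -- check the scDINO README for the actual entry point; -s, --configfile
# and --cores are standard Snakemake flags):
#   snakemake -s full_pipeline.smk --configfile=scDINO_full_pipeline.yaml --cores all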