---
# Basic config file
# comment for empty args
# Vocabulary built from a pretrained HF checkpoint.
VOCABULARY:
  # NOTE(review): presumably a GraphCodeBERT model fine-tuned for Python —
  # confirm the checkpoint exists on the Hub under this name.
  huggingface_model: "Enoch/graphcodebert-py"
# Dataset reader: how raw text files are tokenized and split into examples.
READER:
  huggingface_model: "Enoch/graphcodebert-py"
  # Delimiter between consecutive code snippets in the input files.
  snippet_splitter: "\n$$$\n"
  # Delimiter between a snippet and its label string.
  label_splitter: " $x$ "
  # Separator used when a snippet carries several labels.
  multi_labels: " ; "
  # NOTE(review): meaning of the two 256 values is not evident from this
  # file — presumably max sizes of the two graph parts; confirm in the
  # reader implementation.
  part_graph: [256, 256]
  # kwargs_tokenizer:
  # kwargs_indexer:
# Model architecture and loss.
MODEL:
  # Classification label set.
  # NOTE(review): 16 labels are listed, but classification_head.arg ends in
  # 18 — confirm the arg semantics (looks like [num_layers?, in_dim, out_dim])
  # and whether the output dimension should match the label count.
  labels:
    - "miss_return"
    - "print_return"
    - "bad_division"
    - "bad_variable"
    - "bad_loop"
    - "bad_range"
    - "bad_assign"
    - "bad_list"
    - "bad_file"
    - "miss_try"
    - "miss_parenthesis"
    - "hardcoded_arg"
    - "overwrite_not_increment"
    - "miss_loop"
    - "failed"
    - "correct"
  huggingface_model: "Enoch/graphcodebert-py"
  kwargs_embedder:
    trainable: true
    # 768 matches the BERT-base hidden size used by GraphCodeBERT.
    embedding_size: 768
  encoder:
    name: "bert_pooler"
    arg: ["Enoch/graphcodebert-py"]
    # kwargs:
  classification_head:
    name: "mult_dense"
    arg: [1, 768, 18]
    kwargs:
      activation:
        name: "leaky_relu"
        # arg:
        # kwargs:
      norm: true
  # accuracy:
  #   name: "categorical_accuracy"
  #   arg:
  #   kwargs:
  loss:
    name: "multilabel_soft_margin_loss"
  # Multi-label classification: a snippet may have several labels at once.
  multi_label: true
  debug: false
# Training loop settings.
TRAINER:
  # NOTE(review): leading "-" presumably means "lower is better" for this
  # metric (AllenNLP-style convention) — confirm against the trainer.
  validation_metric: "-loss"
  # Written as 1.0e-5: the bare "1.e-5" form is not resolved as a float by
  # strict YAML 1.2 core-schema parsers (it would load as a string).
  learning_rate: 1.0e-5
# Model interpretability (attribution) settings.
INTERPRETER:
  # Use the Captum library for attributions.
  captum: true
  kwargs:
    interpreter_name: "LayerIntegratedGradients"
    # Named layer at which integrated gradients are computed.
    layer: "bert_interpretable_layer"
    attribute_kwargs:
      # Number of integration steps; kept low here, presumably for speed.
      n_steps: 10
      internal_batch_size: 1
# Run-level configuration: mode flags, data paths, and hardware.
CONFIG:
  predict: true
  no_loop: true
  no_eval: true
  gui: false
  model: "Enoch/graphcodebert-py"
  # Dataset split files (paths relative to the working directory).
  training: "demo/train.txt"
  validation: "demo/validation.txt"
  evaluation: "demo/test.txt"
  # Directory where checkpoints/artifacts are written.
  serialization_dir: "demo"
  # Weight file loaded when running without the training loop (no_loop).
  no_loop_weight_file: "model_weights.th"
  batch_size: 8
  loops: 10
  device: "cuda"