.gitignore
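# Root-level Python bytecode cache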
/__pycache__
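# GloVe data directory (presumably pre-trained word embeddings)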
/glove
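# pycocoevalcap caches plus SPICE library and temp files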
/pycocoevalcap/__pycache__
/pycocoevalcap/bleu/__pycache__
/pycocoevalcap/cider/__pycache__
/pycocoevalcap/meteor/__pycache__
/pycocoevalcap/rouge/__pycache__
/pycocoevalcap/spice/__pycache__
/pycocoevalcap/spice/lib
/pycocoevalcap/spice/tmp
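# Show and Tell: caches, logs, checkpoints, and evaluation/decode outputs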
/MLE/show_tell/__pycache__
/MLE/show_tell/show_tell_log
/MLE/show_tell/show_tell_models
/MLE/show_tell/show_tell_model
/MLE/show_tell/show_tell_evaluate
/MLE/show_tell/show_tell_decode
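# Show, Attend and Tell: caches, logs, checkpoints, and evaluation/decode outputs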
/MLE/show_attend_tell/__pycache__
/MLE/show_attend_tell/show_attend_tell_log
/MLE/show_attend_tell/show_attend_tell_models
/MLE/show_attend_tell/show_attend_tell_model
/MLE/show_attend_tell/show_attend_tell_evaluate
/MLE/show_attend_tell/show_attend_tell_decode
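# Adaptive attention: caches, logs, checkpoints, and evaluation/decode outputs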
/MLE/adaptive_attention/__pycache__
/MLE/adaptive_attention/adaptive_attention_log
/MLE/adaptive_attention/adaptive_attention_models
/MLE/adaptive_attention/adaptive_attention_model
/MLE/adaptive_attention/adaptive_attention_evaluate
/MLE/adaptive_attention/adaptive_attention_decode
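# Contrastive learning: reference model, caches, logs, checkpoints, and evaluation/decode outputs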
/MLE/contrasive_learning/__pycache__
/MLE/contrasive_learning/reference_model
/MLE/contrasive_learning/adaptive_attention_CL_log
/MLE/contrasive_learning/adaptive_attention_CL_models
/MLE/contrasive_learning/adaptive_attention_CL_model
/MLE/contrasive_learning/adaptive_attention_CL_evaluate
/MLE/contrasive_learning/adaptive_attention_CL_decode
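# Hard attention (REINFORCE and Gumbel-softmax variants): caches, logs, checkpoints, and evaluation/decode outputs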
/MLE/hard_attention/__pycache__/
/MLE/hard_attention/hard_attention-REINFORCE_evaluate/
/MLE/hard_attention/hard_attention-REINFORCE_log/
/MLE/hard_attention/hard_attention-REINFORCE_models/
/MLE/hard_attention/hard_attention-REINFORCE_model/
/MLE/hard_attention/hard_attention-REINFORCE_decode
/MLE/hard_attention/hard_attention-Gumbel_softmax_evaluate/
/MLE/hard_attention/hard_attention-Gumbel_softmax_log/
/MLE/hard_attention/hard_attention-Gumbel_softmax_models/
/MLE/hard_attention/hard_attention-Gumbel_softmax_model/
/MLE/hard_attention/hard_attention-Gumbel_softmax_decode