-
Notifications
You must be signed in to change notification settings - Fork 42
/
Makefile
177 lines (145 loc) · 5.24 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
# Declare every command target as phony so the recipe always runs even if a
# file or directory with the same name exists in the project root.
# (`help` is declared .PHONY next to its rule at the bottom of this file.)
.PHONY: clean data requirements create_environment download extract \
        train_ims train_femto summarize_ims_models summarize_femto_models \
        figures_data figures_results test_environment
#################################################################################
# GLOBALS                                                                       #
#################################################################################
# Absolute path of the directory holding this Makefile.
PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
# NOTE(review): PROFILE is not referenced anywhere in this file — likely a
# cookiecutter data-science leftover; confirm before removing.
PROFILE = default
PROJECT_NAME = weibull-knowledge-informed
PYTHON_INTERPRETER = python3
# Detect conda at parse time; HAS_CONDA switches every rule below between the
# "local workstation" branch (conda present) and the "HPC cluster" branch.
ifeq (,$(shell which conda))
HAS_CONDA=False
else
HAS_CONDA=True
endif
#################################################################################
# COMMANDS                                                                      #
#################################################################################
## Create environment and install dependencies
# Verifies the Python interpreter first (test_environment), then runs the
# environment bootstrap script appropriate for the detected platform.
create_environment: test_environment
ifeq (True,$(HAS_CONDA)) # assume on local
	@echo ">>> Detected conda. Assume local computer. Installing packages from yml."
	bash make_local_venv.sh
else # assume on HPC
	@echo ">>> No Conda detected. Assume on HPC."
	bash make_hpc_venv.sh
	@echo ">>> venv created. Activate with source ~/weibull/bin/activate"
endif
## Download data
# Runs the platform-specific download script, passing the project root so the
# script can place files relative to it.
download:
ifneq (True,$(HAS_CONDA)) # no conda detected -> assume HPC
	bash src/data/download_data_hpc.sh $(PROJECT_DIR)
else # conda detected -> assume local machine
	bash src/data/download_data_local.sh $(PROJECT_DIR)
endif
## Extract downloaded data and rename directories as needed
# Runs the platform-specific extraction script against the project root.
extract:
ifneq (True,$(HAS_CONDA)) # no conda detected -> assume HPC
	bash src/data/extract_data_hpc.sh $(PROJECT_DIR)
else # conda detected -> assume local machine
	bash src/data/extract_data_local.sh $(PROJECT_DIR)
endif
## Make Dataset
# NOTE(review): `requirements` is listed in .PHONY at the top of the file but
# no `requirements` rule is visible here — confirm it exists, otherwise make
# will fail with "No rule to make target 'requirements'".
data: requirements
	$(PYTHON_INTERPRETER) src/data/make_dataset.py data/raw data/processed
## Train on IMS
# Local: run the training script directly. HPC: submit it as a batch job
# through Slurm's sbatch.
train_ims:
ifeq (True,$(HAS_CONDA)) # assume on local
	$(PYTHON_INTERPRETER) src/models/train_models.py --data_set ims --path_data data/processed --proj_dir $(PROJECT_DIR)
else # assume on HPC
	sbatch src/models/train_model_ims_hpc.sh $(PROJECT_DIR)
endif
## Train on FEMTO
# Same pattern as train_ims, but for the FEMTO (PRONOSTIA) data set.
train_femto:
ifeq (True,$(HAS_CONDA)) # assume on local
	$(PYTHON_INTERPRETER) src/models/train_models.py --data_set femto --path_data data/processed --proj_dir $(PROJECT_DIR)
else # assume on HPC
	sbatch src/models/train_model_femto_hpc.sh $(PROJECT_DIR)
endif
## Gather the IMS models and generate summaries of how the models perform
# Local: run the summary script directly. HPC: submit it via sbatch.
summarize_ims_models:
ifeq (True,$(HAS_CONDA)) # assume on local
	$(PYTHON_INTERPRETER) src/models/summarize_model_results.py --data_set ims
else # assume on HPC
	sbatch src/models/summarize_model_ims_hpc.sh $(PROJECT_DIR)
endif
## Gather the PRONOSTIAL (FEMTO) models and generate summaries of how the models perform
# Same pattern as summarize_ims_models, but for the FEMTO data set.
summarize_femto_models:
ifeq (True,$(HAS_CONDA)) # assume on local
	$(PYTHON_INTERPRETER) src/models/summarize_model_results.py --data_set femto
else # assume on HPC
	sbatch src/models/summarize_model_femto_hpc.sh $(PROJECT_DIR)
endif
## Make the figures of the data
# Runs the data-visualization script with the configured interpreter.
figures_data:
	$(PYTHON_INTERPRETER) src/visualization/visualize_data.py
## Make the figures of the results
# Runs the results-visualization script with the configured interpreter.
figures_results:
	$(PYTHON_INTERPRETER) src/visualization/visualize_results.py
## Delete all compiled Python files
# First remove byte-code files, then the __pycache__ directories themselves.
# `find ... -delete` cannot remove a non-empty directory, so directories are
# removed with `rm -rf`; `-prune` stops find from descending into a directory
# it is about to delete (avoids "No such file or directory" noise).
clean:
	find . -type f -name "*.py[co]" -delete
	find . -type d -name "__pycache__" -prune -exec rm -rf {} +
## Test python environment is setup correctly
# Exits non-zero (failing dependent targets such as create_environment) when
# the interpreter check in test_environment.py does not pass.
test_environment:
	$(PYTHON_INTERPRETER) test_environment.py
#################################################################################
# PROJECT RULES                                                                 #
#################################################################################
#################################################################################
# Self Documenting Commands                                                     #
#################################################################################
# Running bare `make` prints the help screen below.
.DEFAULT_GOAL := help
# Inspired by <http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html>
# sed script explained:
# /^##/:
# * save line in hold space
# * purge line
# * Loop:
# * append newline + line to hold space
# * go to next line
# * if line starts with doc comment, strip comment character off and loop
# * remove target prerequisites
# * append hold space (+ newline) to line
# * replace newline plus comments by `---`
# * print line
# Separate expressions are necessary because labels cannot be delimited by
# semicolon; see <http://stackoverflow.com/a/11799865/1968>
.PHONY: help
# Scrapes every `## ` doc comment plus its target name out of this Makefile,
# sorts the list, and word-wraps the descriptions to the terminal width.
help:
	@echo "$$(tput bold)Available rules:$$(tput sgr0)"
	@echo
	@sed -n -e "/^## / { \
		h; \
		s/.*//; \
		:doc" \
		-e "H; \
		n; \
		s/^## //; \
		t doc" \
		-e "s/:.*//; \
		G; \
		s/\\n## /---/; \
		s/\\n/ /g; \
		p; \
	}" ${MAKEFILE_LIST} \
	| LC_ALL='C' sort --ignore-case \
	| awk -F '---' \
		-v ncol=$$(tput cols) \
		-v indent=19 \
		-v col_on="$$(tput setaf 6)" \
		-v col_off="$$(tput sgr0)" \
	'{ \
		printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
		n = split($$2, words, " "); \
		line_length = ncol - indent; \
		for (i = 1; i <= n; i++) { \
			line_length -= length(words[i]) + 1; \
			if (line_length <= 0) { \
				line_length = ncol - indent - length(words[i]) - 1; \
				printf "\n%*s ", -indent, " "; \
			} \
			printf "%s ", words[i]; \
		} \
		printf "\n"; \
	}' \
	| more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars')