#!/bin/bash
chmod 600 ~/.ssh/id_rsa
curl -X 'GET' 'http://5.161.179.223:8000/threads/' -H 'accept: application/json' -H 'X-API-Key: david' -v
sudo fallocate -l 30GB /swapfile
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile
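# (optional sketch, not in the original script) verify the swap is active; the commented
# fstab entry is an assumption about wanting the swap file to persist across reboots
sudo swapon --show
free -h
# echo '/swapfile none swap sw 0 0' | sudo tee -a /etc/fstab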
pip install -e git+https://github.com/drew2323/mlroom.git#egg=mlroom
pip install --upgrade "jax[cuda12_pip]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html --default-timeout=100
pip install git+https://github.com/drew2323/keras-tcn.git
pip install tqdm
pip install keras-nlp
pip install keras-nlp-nightly --default-timeout=100
pip install keras
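# (optional sketch, not in the original script) quick check that the CUDA-enabled JAX
# install above actually sees the GPU
python3 -c "import jax; print(jax.devices())"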
cd src/mlroom/mlroom
#RUN ON LOCAL MAC (or another machine that holds the private key) - transfer the private key to the server
ssh -p 8935 [email protected]
scp -P 8935 ~/.ssh/id_ed25519 [email protected]:~/.ssh/
nano ~/.ssh/known_hosts
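# (assumption, not in the original script) after copying the key, tighten its permissions
# on the remote host, otherwise ssh may refuse to use it
ssh -p 8935 [email protected] 'chmod 600 ~/.ssh/id_ed25519'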
#PREP DATA TRANSFER
#TRANSFER TO PROD
#scp -c [email protected] training_data.joblib [email protected]:/mnt/HC_Volume_100253210
rsync -avz --progress training_data.joblib [email protected]:/mnt/HC_Volume_100253210/
rsync -avz --progress ~/src/mlroom/mlroom/training_data.joblib [email protected]:/mnt/HC_Volume_100253210/
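# (optional sketch, not in the original script) verify the upload by comparing checksums
# on both ends; paths reuse the ones from the rsync commands above
md5sum training_data.joblib
ssh [email protected] 'md5sum /mnt/HC_Volume_100253210/training_data.joblib'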
#TRANSFER FROM PROD
#and conversely, download the prepared data
#scp -c none [email protected]:/mnt/HC_Volume_100253210/training_data.joblib .
rsync -avz --progress [email protected]:/mnt/HC_Volume_100253210/training_data.joblib .
rsync -avz --progress [email protected]:/mnt/HC_Volume_100253210/training_data.joblib ~/src/mlroom/mlroom/
#
nohup python3 train.py > output.log 2>&1 &
# (nohup prints the background job's PID here, e.g. 2456)
#RUN SCRIPTS
python3 train.py train --from_file training_data.joblib
nohup python3 train.py train --from_file training_data.joblib > output.log 2>&1 &
python3 train.py prepare --to_file training_data.joblib
nohup python3 train.py prepare --to_file training_data.joblib > output.log 2>&1 &
python3 train.py train --to_file training_data.joblib
nohup python3 train.py train --to_file training_data.joblib > output.log 2>&1 &
#optionally append --toml config.toml (or config_tick.toml, ...) to the commands above to pass a config file
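# (optional sketch, not in the original script) monitor a nohup'd run started above:
# follow the log and confirm the process is still alive
tail -f output.log
ps aux | grep '[t]rain.py'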