utils.py
"""
@Date : 2022/12/20
@Time : 12:46
@Author: Ziyang Huang
@Email : [email protected]
"""
import random
from typing import Optional

import numpy as np
import torch
from torch.backends import cudnn
from torch.optim.optimizer import Optimizer
from transformers import (
    get_constant_schedule,
    get_constant_schedule_with_warmup,
    get_cosine_schedule_with_warmup,
    get_cosine_with_hard_restarts_schedule_with_warmup,
    get_linear_schedule_with_warmup,
)


def ensure_reproducibility(seed: int = 42):
    """Seed all relevant RNGs and force deterministic cuDNN kernels."""
    random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # covers every visible GPU
    np.random.seed(seed)
    # Disable cuDNN autotuning and pick deterministic convolution algorithms;
    # this trades some speed for run-to-run reproducibility.
    cudnn.benchmark = False
    cudnn.deterministic = True
    print(f"set all seeds to {seed}")
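

# Note (not in the original file): the cuDNN flags above do not guarantee
# bitwise reproducibility across all ops, hardware, or library versions;
# newer PyTorch additionally offers torch.use_deterministic_algorithms(True)
# for stricter enforcement.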


def get_lr_scheduler(
    name: str,
    optimizer: Optimizer,
    num_warmup_steps: int,
    num_training_steps: int,
    num_cycles: Optional[int] = None
):
    """Build a transformers LR scheduler by name.

    `num_cycles` is only used by the 'cosine_hard_restart' schedule.
    """
    if name == 'linear':
        scheduler = get_linear_schedule_with_warmup(
            optimizer=optimizer,
            num_warmup_steps=num_warmup_steps,
            num_training_steps=num_training_steps,
        )
    elif name == 'constant':
        scheduler = get_constant_schedule_with_warmup(
            optimizer=optimizer,
            num_warmup_steps=num_warmup_steps,
        )
    elif name == 'cosine':
        scheduler = get_cosine_schedule_with_warmup(
            optimizer=optimizer,
            num_warmup_steps=num_warmup_steps,
            num_training_steps=num_training_steps,
        )
    elif name == 'cosine_hard_restart':
        scheduler = get_cosine_with_hard_restarts_schedule_with_warmup(
            optimizer=optimizer,
            num_warmup_steps=num_warmup_steps,
            num_training_steps=num_training_steps,
            # Passing None would break inside the schedule's lambda, so fall
            # back to the library default of a single cycle.
            num_cycles=num_cycles if num_cycles is not None else 1,
        )
    else:
        # Unrecognized name: fall back to a constant schedule with no warmup.
        scheduler = get_constant_schedule(optimizer=optimizer)
    return scheduler
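

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): the toy model,
# optimizer, learning rate, and step counts below are illustrative
# assumptions, chosen only to show the two helpers working together.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    ensure_reproducibility(seed=42)

    # A throwaway single-layer model, just to have parameters to optimize.
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)

    scheduler = get_lr_scheduler(
        name='cosine',
        optimizer=optimizer,
        num_warmup_steps=100,
        num_training_steps=1000,
    )

    # Typical training-loop order: optimizer.step() first, then scheduler.step().
    for step in range(3):
        optimizer.step()
        scheduler.step()
        print(f"step {step}: lr = {scheduler.get_last_lr()[0]:.2e}")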