# service.py
import typing as t
from pathlib import Path

import numpy as np
import pandas as pd

import bentoml
from bentoml.validators import DataframeSchema

MODEL_SIZE = "large"  # one of {"small", "base", "large"}
MODEL_ID = f"Salesforce/moirai-1.0-R-{MODEL_SIZE}"
PRED_LEN = 20  # prediction length
CTX_LEN = 200  # context length
PATCH_SIZE = "auto"  # let the model pick the patch size
BATCH_SIZE = 128


@bentoml.service(
    name="bentomoirai",
    traffic={
        "timeout": 300,
        "concurrency": 32,
    },
    resources={
        "gpu": 1,
        "gpu_type": "nvidia-tesla-t4",
    },
)
class Moirai:
    def __init__(self) -> None:
        import torch
        from uni2ts.model.moirai import MoiraiForecast, MoiraiModule

        self.device = "cuda" if torch.cuda.is_available() else "cpu"

        # Load the pretrained Moirai module and wrap it in a univariate
        # forecasting head with the configured context/prediction lengths.
        self.model = MoiraiForecast(
            module=MoiraiModule.from_pretrained(MODEL_ID),
            prediction_length=PRED_LEN,
            context_length=CTX_LEN,
            patch_size=PATCH_SIZE,
            num_samples=100,
            target_dim=1,
            feat_dynamic_real_dim=0,
            past_feat_dynamic_real_dim=0,
        ).to(self.device)
        self.predictor = self.model.create_predictor(batch_size=BATCH_SIZE)
        print("Model moirai loaded", "device:", self.device)

    @bentoml.api
    def forecast(
        self, df: t.Annotated[pd.DataFrame, DataframeSchema(orient="records")]
    ) -> np.ndarray:
        from gluonts.dataset.pandas import PandasDataset
        from gluonts.dataset.split import split

        # The DataFrame must contain a "datetime" column, which becomes the index.
        df["datetime"] = pd.to_datetime(df["datetime"])
        df = df.set_index("datetime")
        data_size = df.shape[0]
        ds = PandasDataset(dict(df))

        # Use the entire dataset as test data.
        train, test_template = split(ds, offset=-data_size)
        test_data = test_template.generate_instances(
            prediction_length=PRED_LEN,
            windows=data_size // PRED_LEN,
            distance=PRED_LEN,
        )

        # Reuse the predictor built in __init__ rather than creating a new one
        # on every request.
        forecasts = self.predictor.predict(test_data.input)
        forecast = next(iter(forecasts))
        return forecast.samples

    @bentoml.api
    def forecast_csv(self, csv: Path) -> np.ndarray:
        df = pd.read_csv(csv)
        return self.forecast(df)
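

# --- Example client call (a sketch, not part of the service) -----------------
# Assuming the service is running locally ("bentoml serve service:Moirai") on
# the default port 3000, and that the input CSV ("data.csv" here is a
# hypothetical file) has a "datetime" column plus one value column, a client
# could call the endpoints roughly like this:
#
#   from pathlib import Path
#   import bentoml
#   import pandas as pd
#
#   df = pd.read_csv("data.csv")
#   client = bentoml.SyncHTTPClient("http://localhost:3000")
#   samples = client.forecast(df=df)                   # forecast sample array
#   samples = client.forecast_csv(csv=Path("data.csv"))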