File tree 7 files changed +37
-13
lines changed
slm/applications/text_classification
hierarchical/retrieval_based
multi_class/retrieval_based
7 files changed +37
-13
lines changed Original file line number Diff line number Diff line change 22
22
23
23
from paddlenlp .data import Pad , Tuple
24
24
from paddlenlp .transformers import AutoTokenizer
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
sys .path .append ("." )
27
31
@@ -114,8 +118,8 @@ def __init__(
114
118
self .max_seq_length = max_seq_length
115
119
self .batch_size = batch_size
116
120
117
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
118
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
121
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
122
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
119
123
if not os .path .exists (model_file ):
120
124
raise ValueError ("not find model file path {}" .format (model_file ))
121
125
if not os .path .exists (params_file ):
Original file line number Diff line number Diff line change 22
22
23
23
import paddlenlp as ppnlp
24
24
from paddlenlp .data import Pad , Tuple
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
# fmt: off
27
31
parser = argparse .ArgumentParser ()
@@ -82,8 +86,8 @@ def __init__(
82
86
self .max_seq_length = max_seq_length
83
87
self .batch_size = batch_size
84
88
85
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
86
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
89
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
90
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
87
91
if not os .path .exists (model_file ):
88
92
raise ValueError ("not find model file path {}" .format (model_file ))
89
93
if not os .path .exists (params_file ):
Original file line number Diff line number Diff line change 22
22
23
23
from paddlenlp .data import Pad , Tuple
24
24
from paddlenlp .transformers import AutoTokenizer
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
sys .path .append ("." )
27
31
@@ -114,8 +118,8 @@ def __init__(
114
118
self .max_seq_length = max_seq_length
115
119
self .batch_size = batch_size
116
120
117
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
118
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
121
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
122
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
119
123
if not os .path .exists (model_file ):
120
124
raise ValueError ("not find model file path {}" .format (model_file ))
121
125
if not os .path .exists (params_file ):
Original file line number Diff line number Diff line change 22
22
23
23
import paddlenlp as ppnlp
24
24
from paddlenlp .data import Pad , Tuple
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
# fmt: off
27
31
parser = argparse .ArgumentParser ()
@@ -83,8 +87,8 @@ def __init__(
83
87
self .max_seq_length = max_seq_length
84
88
self .batch_size = batch_size
85
89
86
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
87
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
90
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
91
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
88
92
if not os .path .exists (model_file ):
89
93
raise ValueError ("not find model file path {}" .format (model_file ))
90
94
if not os .path .exists (params_file ):
Original file line number Diff line number Diff line change @@ -355,7 +355,7 @@ python export_model.py --params_path ./checkpoint/ --output_path ./export --mult
355
355
export/
356
356
├── float32.pdiparams
357
357
├── float32.pdiparams.info
358
- └── float32.pdmodel
358
+ └── float32.json (PIR enabled) / float32.pdmodel (PIR disabled)
359
359
```
360
360
导出模型之后用于部署,项目提供了基于 ONNXRuntime 的 [离线部署方案](./deploy/predictor/README.md) 和基于 Paddle Serving 的 [在线服务化部署方案](./deploy/predictor/README.md) 。
361
361
Original file line number Diff line number Diff line change 22
22
23
23
from paddlenlp .data import Pad , Tuple
24
24
from paddlenlp .transformers import AutoTokenizer
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
sys .path .append ("." )
27
31
@@ -114,8 +118,8 @@ def __init__(
114
118
self .max_seq_length = max_seq_length
115
119
self .batch_size = batch_size
116
120
117
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
118
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
121
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
122
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
119
123
if not os .path .exists (model_file ):
120
124
raise ValueError ("not find model file path {}" .format (model_file ))
121
125
if not os .path .exists (params_file ):
Original file line number Diff line number Diff line change 22
22
23
23
import paddlenlp as ppnlp
24
24
from paddlenlp .data import Pad , Tuple
25
+ from paddlenlp .utils .env import (
26
+ PADDLE_INFERENCE_MODEL_SUFFIX ,
27
+ PADDLE_INFERENCE_WEIGHTS_SUFFIX ,
28
+ )
25
29
26
30
# fmt: off
27
31
parser = argparse .ArgumentParser ()
@@ -84,8 +88,8 @@ def __init__(
84
88
self .max_seq_length = max_seq_length
85
89
self .batch_size = batch_size
86
90
87
- model_file = model_dir + "/inference.get_pooled_embedding.pdmodel "
88
- params_file = model_dir + "/inference.get_pooled_embedding.pdiparams "
91
+ model_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_MODEL_SUFFIX } "
92
+ params_file = model_dir + f "/inference.get_pooled_embedding{ PADDLE_INFERENCE_WEIGHTS_SUFFIX } "
89
93
if not os .path .exists (model_file ):
90
94
raise ValueError ("not find model file path {}" .format (model_file ))
91
95
if not os .path .exists (params_file ):
You can’t perform that action at this time.
0 commit comments