Skip to content

Commit

Permalink
Fix: Remove model parameters except max_tokens for DeepSeek-R1 in volcengine (#13446)
Browse files Browse the repository at this point in the history
  • Loading branch information
droxer authored Feb 10, 2025
1 parent 75113c2 commit 7796984
Showing 1 changed file with 63 additions and 46 deletions.
109 changes: 63 additions & 46 deletions api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,54 +328,71 @@ def get_customizable_model_schema(self, model: str, credentials: dict) -> Option
"""
model_config = get_model_config(credentials)

rules = [
ParameterRule(
name="temperature",
type=ParameterType.FLOAT,
use_template="temperature",
label=I18nObject(zh_Hans="温度", en_US="Temperature"),
),
ParameterRule(
name="top_p",
type=ParameterType.FLOAT,
use_template="top_p",
label=I18nObject(zh_Hans="Top P", en_US="Top P"),
),
ParameterRule(
name="top_k", type=ParameterType.INT, min=1, default=1, label=I18nObject(zh_Hans="Top K", en_US="Top K")
),
ParameterRule(
name="presence_penalty",
type=ParameterType.FLOAT,
use_template="presence_penalty",
label=I18nObject(
en_US="Presence Penalty",
zh_Hans="存在惩罚",
if model.startswith("DeepSeek-R1"):
rules = [
ParameterRule(
name="max_tokens",
type=ParameterType.INT,
use_template="max_tokens",
min=1,
max=model_config.properties.max_tokens,
default=512,
label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
),
min=-2.0,
max=2.0,
),
ParameterRule(
name="frequency_penalty",
type=ParameterType.FLOAT,
use_template="frequency_penalty",
label=I18nObject(
en_US="Frequency Penalty",
zh_Hans="频率惩罚",
]
else:
rules = [
ParameterRule(
name="temperature",
type=ParameterType.FLOAT,
use_template="temperature",
label=I18nObject(zh_Hans="温度", en_US="Temperature"),
),
min=-2.0,
max=2.0,
),
ParameterRule(
name="max_tokens",
type=ParameterType.INT,
use_template="max_tokens",
min=1,
max=model_config.properties.max_tokens,
default=512,
label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
),
]
ParameterRule(
name="top_p",
type=ParameterType.FLOAT,
use_template="top_p",
label=I18nObject(zh_Hans="Top P", en_US="Top P"),
),
ParameterRule(
name="top_k",
type=ParameterType.INT,
min=1,
default=1,
label=I18nObject(zh_Hans="Top K", en_US="Top K"),
),
ParameterRule(
name="presence_penalty",
type=ParameterType.FLOAT,
use_template="presence_penalty",
label=I18nObject(
en_US="Presence Penalty",
zh_Hans="存在惩罚",
),
min=-2.0,
max=2.0,
),
ParameterRule(
name="frequency_penalty",
type=ParameterType.FLOAT,
use_template="frequency_penalty",
label=I18nObject(
en_US="Frequency Penalty",
zh_Hans="频率惩罚",
),
min=-2.0,
max=2.0,
),
ParameterRule(
name="max_tokens",
type=ParameterType.INT,
use_template="max_tokens",
min=1,
max=model_config.properties.max_tokens,
default=512,
label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
),
]

model_properties = {}
model_properties[ModelPropertyKey.CONTEXT_SIZE] = model_config.properties.context_size
Expand Down

0 comments on commit 7796984

Please sign in to comment.