diff --git a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py b/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py
index d513d6b5480520..ac2805f7d4bc0b 100644
--- a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py
+++ b/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py
@@ -328,54 +328,71 @@ def get_customizable_model_schema(self, model: str, credentials: dict) -> Option
         """
         model_config = get_model_config(credentials)
 
-        rules = [
-            ParameterRule(
-                name="temperature",
-                type=ParameterType.FLOAT,
-                use_template="temperature",
-                label=I18nObject(zh_Hans="温度", en_US="Temperature"),
-            ),
-            ParameterRule(
-                name="top_p",
-                type=ParameterType.FLOAT,
-                use_template="top_p",
-                label=I18nObject(zh_Hans="Top P", en_US="Top P"),
-            ),
-            ParameterRule(
-                name="top_k", type=ParameterType.INT, min=1, default=1, label=I18nObject(zh_Hans="Top K", en_US="Top K")
-            ),
-            ParameterRule(
-                name="presence_penalty",
-                type=ParameterType.FLOAT,
-                use_template="presence_penalty",
-                label=I18nObject(
-                    en_US="Presence Penalty",
-                    zh_Hans="存在惩罚",
+        if model.startswith("DeepSeek-R1"):
+            rules = [
+                ParameterRule(
+                    name="max_tokens",
+                    type=ParameterType.INT,
+                    use_template="max_tokens",
+                    min=1,
+                    max=model_config.properties.max_tokens,
+                    default=512,
+                    label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
                 ),
-                min=-2.0,
-                max=2.0,
-            ),
-            ParameterRule(
-                name="frequency_penalty",
-                type=ParameterType.FLOAT,
-                use_template="frequency_penalty",
-                label=I18nObject(
-                    en_US="Frequency Penalty",
-                    zh_Hans="频率惩罚",
+            ]
+        else:
+            rules = [
+                ParameterRule(
+                    name="temperature",
+                    type=ParameterType.FLOAT,
+                    use_template="temperature",
+                    label=I18nObject(zh_Hans="温度", en_US="Temperature"),
                 ),
-                min=-2.0,
-                max=2.0,
-            ),
-            ParameterRule(
-                name="max_tokens",
-                type=ParameterType.INT,
-                use_template="max_tokens",
-                min=1,
-                max=model_config.properties.max_tokens,
-                default=512,
-                label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
-            ),
-        ]
+                ParameterRule(
+                    name="top_p",
+                    type=ParameterType.FLOAT,
+                    use_template="top_p",
+                    label=I18nObject(zh_Hans="Top P", en_US="Top P"),
+                ),
+                ParameterRule(
+                    name="top_k",
+                    type=ParameterType.INT,
+                    min=1,
+                    default=1,
+                    label=I18nObject(zh_Hans="Top K", en_US="Top K"),
+                ),
+                ParameterRule(
+                    name="presence_penalty",
+                    type=ParameterType.FLOAT,
+                    use_template="presence_penalty",
+                    label=I18nObject(
+                        en_US="Presence Penalty",
+                        zh_Hans="存在惩罚",
+                    ),
+                    min=-2.0,
+                    max=2.0,
+                ),
+                ParameterRule(
+                    name="frequency_penalty",
+                    type=ParameterType.FLOAT,
+                    use_template="frequency_penalty",
+                    label=I18nObject(
+                        en_US="Frequency Penalty",
+                        zh_Hans="频率惩罚",
+                    ),
+                    min=-2.0,
+                    max=2.0,
+                ),
+                ParameterRule(
+                    name="max_tokens",
+                    type=ParameterType.INT,
+                    use_template="max_tokens",
+                    min=1,
+                    max=model_config.properties.max_tokens,
+                    default=512,
+                    label=I18nObject(zh_Hans="最大生成长度", en_US="Max Tokens"),
+                ),
+            ]
 
         model_properties = {}
         model_properties[ModelPropertyKey.CONTEXT_SIZE] = model_config.properties.context_size
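
For review context, a minimal, self-contained sketch of the branching this hunk introduces. `FakeRule` and `rules_for` are hypothetical stand-ins for `ParameterRule` and the schema method (the real Dify objects carry labels, templates, and bounds as shown above); the point is only that customizable models whose name starts with `DeepSeek-R1` expose a single `max_tokens` rule, while every other model keeps the full parameter set:

```python
from dataclasses import dataclass


@dataclass
class FakeRule:
    """Stand-in for ParameterRule; only the rule name matters in this sketch."""
    name: str


def rules_for(model: str) -> list[FakeRule]:
    # Mirrors the hunk above: DeepSeek-R1 custom models get only max_tokens;
    # all other models keep the full sampling-parameter set.
    if model.startswith("DeepSeek-R1"):
        return [FakeRule("max_tokens")]
    return [
        FakeRule(name)
        for name in (
            "temperature",
            "top_p",
            "top_k",
            "presence_penalty",
            "frequency_penalty",
            "max_tokens",
        )
    ]


# Example model names are illustrative placeholders, not an endpoint list.
assert [r.name for r in rules_for("DeepSeek-R1-Distill-Qwen-7B")] == ["max_tokens"]
assert "temperature" in {r.name for r in rules_for("Doubao-pro-32k")}
```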