Skip to content

Commit

Permalink
add DeepSeek system role and keep it consistent with the HF chat template
Browse files Browse the repository at this point in the history
  • Loading branch information
AllentDan committed Jan 15, 2025
1 parent cd2272a commit 3e44d37
Showing 1 changed file with 19 additions and 15 deletions.
34 changes: 19 additions & 15 deletions lmdeploy/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,7 @@ def __init__(
eosys=' ',
user='USER: ',
eoh=' ',
assistant='Assistant:',
assistant='ASSISTANT: ',
eoa='</s>',
stop_words=['</s>'],
**kwargs):
Expand Down Expand Up @@ -358,10 +358,10 @@ def __init__(
super().__init__(meta_instruction=meta_instruction, **kwargs)

def get_prompt(self, prompt, sequence_start=True):
return super().get_prompt(prompt, sequence_start)
return super().get_prompt(prompt, sequence_start)[:-1]

def messages2prompt(self, messages, sequence_start=True, **kwargs):
return super().messages2prompt(messages, sequence_start, **kwargs)
return super().messages2prompt(messages, sequence_start, **kwargs)[:-1]

@classmethod
def match(cls, model_path: str) -> Optional[str]:
Expand All @@ -386,10 +386,10 @@ def __init__(self, **kwargs):
super().__init__(**kwargs)

def get_prompt(self, prompt, sequence_start=True):
return super().get_prompt(prompt, sequence_start)
return super().get_prompt(prompt, sequence_start)[:-1]

def messages2prompt(self, messages, sequence_start=True, **kwargs):
return super().messages2prompt(messages, sequence_start, **kwargs)
return super().messages2prompt(messages, sequence_start, **kwargs)[:-1]

@classmethod
def match(cls, model_path: str) -> Optional[str]:
Expand Down Expand Up @@ -1267,7 +1267,7 @@ def __init__(
eosys='</s>\n',
user='User: ',
eoh='</s>\n',
assistant='Assistant:',
assistant='Assistant: ',
eoa='</s>',
separator='\n',
stop_words=['</s>'],
Expand Down Expand Up @@ -1396,22 +1396,26 @@ def match(cls, model_path: str) -> Optional[str]:
class Deepseek(BaseChatTemplate):

def __init__(self,
meta_instruction=None,
eosys='\n\n',
user='User: ',
eoh='\n\n',
assistant='Assistant:',
assistant='Assistant: ',
eoa='<|end▁of▁sentence|>',
**kwargs):
super().__init__(user=user,
eoh=eoh,
meta_instruction=meta_instruction,
eosys=eosys,
assistant=assistant,
eoa=eoa,
**kwargs)

def get_prompt(self, prompt, sequence_start=True):
return super().get_prompt(prompt, sequence_start)
return super().get_prompt(prompt, sequence_start)[:-1]

def messages2prompt(self, messages, sequence_start=True, **kwargs):
return super().messages2prompt(messages, sequence_start, **kwargs)
return super().messages2prompt(messages, sequence_start, **kwargs)[:-1]

@classmethod
def match(cls, model_path: str) -> Optional[str]:
Expand All @@ -1431,7 +1435,7 @@ class InternVLZH(BaseChatTemplate):
def __init__(self,
user='<human>: ',
eoh=' ',
assistant='<bot>:',
assistant='<bot>: ',
eoa='</s>',
**kwargs):
super().__init__(user=user,
Expand All @@ -1441,10 +1445,10 @@ def __init__(self,
**kwargs)

def get_prompt(self, prompt, sequence_start=True):
return super().get_prompt(prompt, sequence_start)
return super().get_prompt(prompt, sequence_start)[:-1]

def messages2prompt(self, messages, sequence_start=True, **kwargs):
return super().messages2prompt(messages, sequence_start, **kwargs)
return super().messages2prompt(messages, sequence_start, **kwargs)[:-1]

@classmethod
def match(cls, model_path: str) -> Optional[str]:
Expand All @@ -1467,7 +1471,7 @@ def __init__(
eosys='\n\n',
user='User: ',
eoh='\n\n',
assistant='Assistant:',
assistant='Assistant: ',
eoa='<|end▁of▁sentence|>',
**kwargs):
super().__init__(meta_instruction=meta_instruction,
Expand All @@ -1479,10 +1483,10 @@ def __init__(
**kwargs)

def get_prompt(self, prompt, sequence_start=True):
return super().get_prompt(prompt, sequence_start)
return super().get_prompt(prompt, sequence_start)[:-1]

def messages2prompt(self, messages, sequence_start=True, **kwargs):
return super().messages2prompt(messages, sequence_start, **kwargs)
return super().messages2prompt(messages, sequence_start, **kwargs)[:-1]

@classmethod
def match(cls, model_path: str) -> Optional[str]:
Expand Down

0 comments on commit 3e44d37

Please sign in to comment.