From d71ecb2eb98e393cc2e00be002daf898cdd14ecf Mon Sep 17 00:00:00 2001 From: HelloWorldLTY Date: Fri, 23 Feb 2024 16:52:34 -0500 Subject: [PATCH 1/5] update transformers file --- guidance/models/transformers/_transformers.py | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/guidance/models/transformers/_transformers.py b/guidance/models/transformers/_transformers.py index 0464ed6c2..9159d200e 100644 --- a/guidance/models/transformers/_transformers.py +++ b/guidance/models/transformers/_transformers.py @@ -1,4 +1,6 @@ import os +from peft import PeftModel + try: import torch @@ -38,7 +40,7 @@ def __init__(self, tokenizer): ) class TransformersEngine(Engine): - def __init__(self, model, tokenizer, compute_log_probs, **kwargs): + def __init__(self, model, tokenizer, peft_model_id, compute_log_probs, **kwargs): # fill in default model value if model is None: model = os.environ.get("TRANSFORMERS_MODEL", None) @@ -49,7 +51,7 @@ def __init__(self, model, tokenizer, compute_log_probs, **kwargs): except: pass - self.model_obj, orig_tokenizer = self._model_and_tokenizer(model, tokenizer, **kwargs) + self.model_obj, orig_tokenizer = self._model_and_tokenizer(model, tokenizer, peft_model_id, **kwargs) if not isinstance(model, str): self.model = model.__class__.__name__ @@ -64,7 +66,7 @@ def __init__(self, model, tokenizer, compute_log_probs, **kwargs): compute_log_probs=compute_log_probs ) - def _model_and_tokenizer(self, model, tokenizer, **kwargs): + def _model_and_tokenizer(self, model, tokenizer, peft_model_id, **kwargs): # intantiate the model and tokenizer if needed if isinstance(model, str): @@ -87,6 +89,14 @@ def _model_and_tokenizer(self, model, tokenizer, **kwargs): except: tokenizer = transformers.AutoTokenizer.from_pretrained(model, use_fast=True, **kwargs) # fall back to the fast tokenizer model = transformers.AutoModelForCausalLM.from_pretrained(model, **kwargs) + + if peft_model_id is not None: + try: + model = 
PeftModel.from_pretrained(model, peft_model_id) + except ImportError as e: + print("Cannot load peft module, please install with 'pip install peft' or 'pip install git+https://github.com/huggingface/peft'") + except Exception as e: #fallthrough general exception + print(f"Exception while applying peft model:\n{e}") assert tokenizer is not None, "You must give a tokenizer object when you provide a model object (as opposed to just a model name)!" @@ -155,10 +165,10 @@ def get_logits(self, token_ids, forced_bytes, current_temp): class Transformers(Model): - def __init__(self, model=None, tokenizer=None, echo=True, compute_log_probs=False, **kwargs): + def __init__(self, model=None, tokenizer=None, peft_model_id=None, echo=True, compute_log_probs=False, **kwargs): '''Build a new Transformers model object that represents a model in a given state.''' super().__init__( - TransformersEngine(model, tokenizer, compute_log_probs, **kwargs), + TransformersEngine(model, tokenizer, peft_model_id, compute_log_probs, **kwargs), echo=echo ) From de118280e0a1e057a7ce7cdcd0fc41083e6ca4fd Mon Sep 17 00:00:00 2001 From: HelloWorldLTY <43333475+HelloWorldLTY@users.noreply.github.com> Date: Thu, 7 Mar 2024 10:20:26 -0800 Subject: [PATCH 2/5] Create test_peft.py --- tests/models/test_peft.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 tests/models/test_peft.py diff --git a/tests/models/test_peft.py b/tests/models/test_peft.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/models/test_peft.py @@ -0,0 +1 @@ + From 85d91a7b96197e5e5933b38df6f974b3775be501 Mon Sep 17 00:00:00 2001 From: HelloWorldLTY <43333475+HelloWorldLTY@users.noreply.github.com> Date: Thu, 7 Mar 2024 10:21:31 -0800 Subject: [PATCH 3/5] Update setup.py --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b7e19acc1..1187a34cc 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,8 @@ def find_version(*file_paths): "pyformlang", 
"protobuf", "fastapi", - "uvicorn" + "uvicorn", + "peft" ], extras_require={ 'docs': [ From 379461a973db6aaf8816ac3d594a189e93246beb Mon Sep 17 00:00:00 2001 From: HelloWorldLTY <43333475+HelloWorldLTY@users.noreply.github.com> Date: Thu, 7 Mar 2024 10:25:10 -0800 Subject: [PATCH 4/5] Update test_peft.py --- tests/models/test_peft.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/models/test_peft.py b/tests/models/test_peft.py index 8b1378917..a81dd87a4 100644 --- a/tests/models/test_peft.py +++ b/tests/models/test_peft.py @@ -1 +1,8 @@ +import guidance +import pytest +def test_peft(): + try: + import peft + except: + raise Exception("Sorry, peft is not installed") From 908e8a92637fb1a622b2e3b102f3bafc121d203d Mon Sep 17 00:00:00 2001 From: HelloWorldLTY <43333475+HelloWorldLTY@users.noreply.github.com> Date: Thu, 7 Mar 2024 13:30:30 -0800 Subject: [PATCH 5/5] Update test_peft.py --- tests/models/test_peft.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/tests/models/test_peft.py b/tests/models/test_peft.py index a81dd87a4..90d80935d 100644 --- a/tests/models/test_peft.py +++ b/tests/models/test_peft.py @@ -1,8 +1,22 @@ -import guidance -import pytest - +from transformers import AutoModelForCausalLM def test_peft(): try: import peft + from peft import LoraConfig, TaskType, get_peft_model + + lora_config = LoraConfig( r=16, target_modules=["q_proj", "v_proj"], task_type=TaskType.CAUSAL_LM, lora_alpha=32, lora_dropout=0.05 ) + model = AutoModelForCausalLM.from_pretrained("facebook/opt-350m") + + lora_model = get_peft_model(model, lora_config) + lora_model.print_trainable_parameters() + + print("Running PEFT is successful!") + except: raise Exception("Sorry, peft is not installed")