diff --git a/docs/api/local_language_model_clients/LlamaCpp.md b/docs/api/local_language_model_clients/LlamaCpp.md
index 47120d65b..337c39a9d 100644
--- a/docs/api/local_language_model_clients/LlamaCpp.md
+++ b/docs/api/local_language_model_clients/LlamaCpp.md
@@ -1,4 +1,4 @@
-# dspy.LlamaCpp
+ dspy.LlamaCpp
 
 ## Prerequisites
 
@@ -57,6 +57,3 @@ pred = generate_answer(question=question)
 
 print(f"Question: {question}")
 print(f"Predicted Answer: {pred.answer}")
-
-
-```
\ No newline at end of file
diff --git a/dspy/utils/dummies.py b/dspy/utils/dummies.py
index d97d59b5a..80658eb93 100644
--- a/dspy/utils/dummies.py
+++ b/dspy/utils/dummies.py
@@ -6,6 +6,9 @@
 from dsp.modules import LM
 from dsp.utils.utils import dotdict
 
+from dspy.clients import LM as LiteLLM
+
+RED, GREEN, RESET = "\033[91m", "\033[92m", "\033[0m"
 
 
 class DummyLM(LM):
@@ -64,7 +67,6 @@ def basic_request(self, prompt, n=1, **kwargs) -> dict[str, list[dict[str, str]]
             },
         )
 
-        RED, GREEN, RESET = "\033[91m", "\033[92m", "\033[0m"
         print("=== DummyLM ===")
         print(prompt, end="")
         print(f"{RED}{answer}{RESET}")
@@ -94,6 +96,15 @@ def get_convo(self, index) -> str:
         return self.history[index]["prompt"] + " " + self.history[index]["response"]["choices"][0]["text"]
 
 
+class DummyLiteLLM(LiteLLM):
+    def __init__(self, answers: list[str], model_type="chat", temperature=0.0, max_tokens=1000, cache=True, **kwargs):
+        super().__init__("dummy", model_type, temperature, max_tokens, cache, **kwargs)
+        self.answers = iter([[ans] for ans in answers])
+
+    def __call__(self, **kwargs):
+        return next(self.answers)
+
+
 def dummy_rm(passages=()) -> callable:
     if not passages:
diff --git a/tests/predict/test_predict.py b/tests/predict/test_predict.py
index c701b380a..79e7fc289 100644
--- a/tests/predict/test_predict.py
+++ b/tests/predict/test_predict.py
@@ -1,11 +1,14 @@
-import dspy
-from dspy import Predict, Signature, TypedPredictor
-from dspy.utils.dummies import DummyLM
 import copy
 import textwrap
+
 import pydantic
+import pytest
 import ujson
 
+import dspy
+from dspy import Predict, Signature, TypedPredictor
+from dspy.utils.dummies import DummyLiteLLM, DummyLM
+
 
 def test_initialization_with_string_signature():
     signature_string = "input1, input2 -> output"
@@ -209,14 +212,203 @@ class OutputOnlySignature(dspy.Signature):
     assert lm.get_convo(-1) == textwrap.dedent(
         """\
         Given the fields , produce the fields `output`.
-        
+
         ---
-        
+
         Follow the following format.
-        
+
         Output: ${output}
-        
+
         ---
-        
+
         Output: short answer"""
     )
+
+
+@pytest.fixture(name="SandwichIdea")
+def sandwich_idea_signature():
+    class SandwichIdea(dspy.Signature):
+        """Based on the meal and dietary requirements, suggest a sandwich idea."""
+
+        meal: str = dspy.InputField()
+        dietary_requirements: str = dspy.InputField()
+        bread: str = dspy.OutputField()
+        protein: str = dspy.OutputField()
+        fat: str = dspy.OutputField()
+        garnish: str = dspy.OutputField()
+        sauce: str = dspy.OutputField()
+
+    return SandwichIdea
+
+
+def test_extend_generation(SandwichIdea):
+    lm = DummyLiteLLM(
+        [
+            "\n[[ ## bread ## ]]\n whole wheat\n\n[[ ## protein ## ]]\n turkey\n\n[[ ## fat ## ]]\n avocado",
+            # The incomplete generation leads to the garnish field being assigned
+            # an empty string ("") in dsp.primitives.predict (l. 98); generation
+            # therefore continues with the next field.
+ "\n[[ ## garnish ## ]]\ntomato\n\n[[ ## sauce ## ]]\n mustard\n\n", + ] + ) + dspy.settings.configure(lm=lm) + + prediction = Predict(SandwichIdea)(meal="lunch", dietary_requiements="N/A") + # The logged conversation (additional newlines removed, [..] indicates the generation): + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. + # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread: [whole wheat + # Protein: turkey + # Fat: avocado] + # === + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. + # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread: whole wheat + # Protein: turkey + # Fat: avocado + # Garnish: [tomato + # Sauce: mustard] + # === + + assert prediction.bread == "whole wheat" + assert prediction.protein == "turkey" + assert prediction.fat == "avocado" + assert prediction.garnish == "" # This field is assigned as "" when the generation is extended + assert prediction.sauce == "tomato \n\nSauce: mustard" + + +def test_extend_generation_rolled_back_when_field_is_skipped(SandwichIdea): + lm = DummyLiteLLM( + [ + " white\n\nFat: butter\n\nGarnish: lettuce\n\nSauce: mayo", + " ham\n\nFat: butter\n\nGarnish: lettuce\n\nSauce: mayo", + ] + ) + dspy.settings.configure(lm=lm) + # The logged conversation (additional newlines removed, [..] indicates the generation): + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. + # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread:[ white + # Fat: butter + # Garnish: lettuce + # Sauce: mayo] + # === + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. + # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread: white Fat: butter Garnish: lettuce Sauce: mayo + # Protein:[ ham + # Fat: butter + # Garnish: lettuce + # Sauce: mayo] + # === + + predictor = Predict(SandwichIdea)(meal="lunch", dietary_requiements="N/A") + assert predictor.bread == "white\n\nFat: butter\n\nGarnish: lettuce\n\nSauce: mayo" + assert predictor.protein == "" # This field is assigned as "" when the generation is rolled back + assert predictor.fat == "ham\n\nFat: butter" + assert predictor.garnish == "lettuce" + assert predictor.sauce == "mayo" + + +def test_extend_generation_with_empty_field(SandwichIdea): + lm = DummyLiteLLM( + [ + " white\n\nProtein: \n\nFat: butter\n\nGarnish: lettuce", + " lettuce \n\nSauce: mayo", + ] + ) + dspy.settings.configure(lm=lm) + # The logged conversation (additional newlines removed, [..] indicates the generation): + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. 
+ # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread:[ white + # Protein: + # Fat: butter + # Garnish: lettuce] + # === + # === DummyLM === + # Based on the meal and dietary requirements, suggest a sandwich idea. + # --- + # Follow the following format. + # Meal: ${meal} + # Dietary Requiements: ${dietary_requiements} + # Bread: ${bread} + # Protein: ${protein} + # Fat: ${fat} + # Garnish: ${garnish} + # Sauce: ${sauce} + # --- + # Meal: lunch + # Dietary Requiements: N/A + # Bread: white + # Protein: Fat: butter Garnish: lettuce + # Fat:[ lettuce + # Sauce: mayo] + # === + + predictor = Predict(SandwichIdea)(meal="lunch", dietary_requiements="N/A") + assert predictor.bread == "white" + assert predictor.protein == "Fat: butter\n\nGarnish: lettuce" + assert predictor.fat == "" + assert predictor.garnish == "lettuce" + assert predictor.sauce == "mayo"