Skip to content

Commit 17dcaf8

Browse files
authored
Merge pull request #847 from guardrails-ai/dtam/051_6x_deprecations
prompt, instructions, msg_history deprecations, messages RAIL support
2 parents 4872f8f + 8b4df1f commit 17dcaf8

File tree

11 files changed

+328
-32
lines changed

11 files changed

+328
-32
lines changed

guardrails/actions/reask.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
from guardrails.classes.validation.validation_result import FailResult
99
from guardrails.prompt.instructions import Instructions
1010
from guardrails.prompt.prompt import Prompt
11+
from guardrails.prompt.messages import Messages
1112
from guardrails.schema.generator import generate_example
1213
from guardrails.schema.rail_schema import json_schema_to_rail_output
1314
from guardrails.types.validator import ValidatorMap
@@ -294,6 +295,19 @@ def get_reask_setup_for_string(
294295
xml_output_schema=xml_output_schema,
295296
**prompt_params,
296297
)
298+
messages = None
299+
if exec_options.reask_messages:
300+
messages = Messages(exec_options.reask_messages)
301+
if messages is None:
302+
messages = Messages(
303+
[{"role": "system", "content": "You are a helpful assistant."}]
304+
)
305+
306+
messages = messages.format(
307+
output_schema=schema_prompt_content,
308+
xml_output_schema=xml_output_schema,
309+
**prompt_params,
310+
)
297311

298312
return output_schema, prompt, instructions
299313

@@ -459,6 +473,18 @@ def reask_decoder(obj: ReAsk):
459473
instructions = Instructions(instructions_const)
460474
instructions = instructions.format(**prompt_params)
461475

476+
# TODO: enable this in 0.6.0
477+
# messages = None
478+
# if exec_options.reask_messages:
479+
# messages = Messages(exec_options.reask_messages)
480+
# else:
481+
# messages = Messages(
482+
# [
483+
# {"role": "system", "content": instructions},
484+
# {"role": "user", "content": prompt},
485+
# ]
486+
# )
487+
462488
return reask_schema, prompt, instructions
463489

464490

guardrails/async_guard.py

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -92,11 +92,12 @@ def from_pydantic(
9292
cls,
9393
output_class: ModelOrListOfModels,
9494
*,
95-
prompt: Optional[str] = None, # deprecate this too
96-
instructions: Optional[str] = None, # deprecate this too
95+
prompt: Optional[str] = None,
96+
instructions: Optional[str] = None,
9797
num_reasks: Optional[int] = None,
98-
reask_prompt: Optional[str] = None, # deprecate this too
99-
reask_instructions: Optional[str] = None, # deprecate this too
98+
reask_prompt: Optional[str] = None,
99+
reask_instructions: Optional[str] = None,
100+
reask_messages: Optional[List[Dict]] = None,
100101
tracer: Optional[Tracer] = None,
101102
name: Optional[str] = None,
102103
description: Optional[str] = None,
@@ -108,6 +109,7 @@ def from_pydantic(
108109
num_reasks=num_reasks,
109110
reask_prompt=reask_prompt,
110111
reask_instructions=reask_instructions,
112+
reask_messages=reask_messages,
111113
tracer=tracer,
112114
name=name,
113115
description=description,
@@ -123,10 +125,10 @@ def from_string(
123125
validators: Sequence[Validator],
124126
*,
125127
string_description: Optional[str] = None,
126-
prompt: Optional[str] = None, # deprecate this too
127-
instructions: Optional[str] = None, # deprecate this too
128-
reask_prompt: Optional[str] = None, # deprecate this too
129-
reask_instructions: Optional[str] = None, # deprecate this too
128+
prompt: Optional[str] = None,
129+
instructions: Optional[str] = None,
130+
reask_prompt: Optional[str] = None,
131+
reask_instructions: Optional[str] = None,
130132
num_reasks: Optional[int] = None,
131133
tracer: Optional[Tracer] = None,
132134
name: Optional[str] = None,
@@ -251,6 +253,10 @@ async def __exec(
251253
"custom_reask_instructions",
252254
self._exec_opts.reask_instructions is not None,
253255
),
256+
(
257+
"custom_reask_messages",
258+
self._exec_opts.reask_messages is not None,
259+
),
254260
],
255261
is_parent=True, # It will have children
256262
has_parent=False, # Has no parents

guardrails/classes/execution/guard_execution_options.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ class GuardExecutionOptions:
77
prompt: Optional[str] = None
88
instructions: Optional[str] = None
99
msg_history: Optional[List[Dict]] = None
10+
messages: Optional[List[Dict]] = None
1011
reask_prompt: Optional[str] = None
1112
reask_instructions: Optional[str] = None
13+
reask_messages: Optional[List[Dict]] = None
1214
num_reasks: Optional[int] = None

guardrails/classes/history/call.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
from guardrails.constants import error_status, fail_status, not_run_status, pass_status
1818
from guardrails.prompt.instructions import Instructions
1919
from guardrails.prompt.prompt import Prompt
20+
from guardrails.prompt.messages import Messages
2021
from guardrails.classes.validation.validator_logs import ValidatorLogs
2122
from guardrails.actions.reask import (
2223
ReAsk,
@@ -132,6 +133,25 @@ def compiled_instructions(self) -> Optional[str]:
132133
if instructions is not None:
133134
return instructions.format(**prompt_params).source
134135

136+
@property
137+
def reask_messages(self) -> Stack[Messages]:
138+
"""The compiled messages used during reasks.
139+
140+
Does not include the initial messages.
141+
"""
142+
if self.iterations.length > 0:
143+
reasks = self.iterations.copy()
144+
initial_messages = reasks.first
145+
reasks.remove(initial_messages) # type: ignore
146+
return Stack(
147+
*[
148+
r.inputs.messages if r.inputs.messages is not None else None
149+
for r in reasks
150+
]
151+
)
152+
153+
return Stack()
154+
135155
@property
136156
def reask_instructions(self) -> Stack[str]:
137157
"""The compiled instructions used during reasks.

guardrails/classes/history/inputs.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from guardrails.llm_providers import PromptCallableBase
88
from guardrails.prompt.instructions import Instructions
99
from guardrails.prompt.prompt import Prompt
10+
from guardrails.prompt.messages import Messages
1011

1112

1213
class Inputs(IInputs, ArbitraryModel):
@@ -52,6 +53,10 @@ class Inputs(IInputs, ArbitraryModel):
5253
description="The message history provided by the user for chat model calls.",
5354
default=None,
5455
)
56+
messages: Optional[List[Messages]] = Field(
57+
description="The message history provided by the user for chat model calls.",
58+
default=None,
59+
)
5560
prompt_params: Optional[Dict] = Field(
5661
description="The parameters provided by the user"
5762
"that will be formatted into the final LLM prompt.",

guardrails/guard.py

Lines changed: 47 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -465,11 +465,13 @@ def from_pydantic(
465465
cls,
466466
output_class: ModelOrListOfModels,
467467
*,
468-
prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
469-
instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
468+
prompt: Optional[str] = None,
469+
instructions: Optional[str] = None,
470470
num_reasks: Optional[int] = None,
471-
reask_prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
472-
reask_instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
471+
reask_prompt: Optional[str] = None,
472+
reask_instructions: Optional[str] = None,
473+
reask_messages: Optional[List[Dict]] = None,
474+
messages: Optional[List[Dict]] = None,
473475
tracer: Optional[Tracer] = None,
474476
name: Optional[str] = None,
475477
description: Optional[str] = None,
@@ -485,6 +487,7 @@ def from_pydantic(
485487
instructions (str, optional): Instructions for chat models. Defaults to None.
486488
reask_prompt (str, optional): An alternative prompt to use during reasks. Defaults to None.
487489
reask_instructions (str, optional): Alternative instructions to use during reasks. Defaults to None.
490+
reask_messages (List[Dict], optional): A list of messages to use during reasks. Defaults to None.
488491
num_reasks (int, optional): The max times to re-ask the LLM if validation fails. Deprecated
489492
tracer (Tracer, optional): An OpenTelemetry tracer to use for metrics and traces. Defaults to None.
490493
name (str, optional): A unique name for this Guard. Defaults to `gr-` + the object id.
@@ -503,6 +506,19 @@ def from_pydantic(
503506
DeprecationWarning,
504507
)
505508

509+
if reask_instructions:
510+
warnings.warn(
511+
"reask_instructions is deprecated and will be removed in 0.6.x! "
512+
"Please be prepared to set reask_messages instead.",
513+
DeprecationWarning,
514+
)
515+
if reask_prompt:
516+
warnings.warn(
517+
"reask_prompt is deprecated and will be removed in 0.6.x! "
518+
"Please be prepared to set reask_messages instead.",
519+
DeprecationWarning,
520+
)
521+
506522
# We have to set the tracer in the ContextStore before the Rail,
507523
# and therefore the Validators, are initialized
508524
cls._set_tracer(cls, tracer) # type: ignore
@@ -513,6 +529,8 @@ def from_pydantic(
513529
instructions=instructions,
514530
reask_prompt=reask_prompt,
515531
reask_instructions=reask_instructions,
532+
reask_messages=reask_messages,
533+
messages=messages,
516534
)
517535
guard = cls(
518536
name=name,
@@ -548,10 +566,12 @@ def from_string(
548566
validators: Sequence[Validator],
549567
*,
550568
string_description: Optional[str] = None,
551-
prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
552-
instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
553-
reask_prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
554-
reask_instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
569+
prompt: Optional[str] = None,
570+
instructions: Optional[str] = None,
571+
reask_prompt: Optional[str] = None,
572+
reask_instructions: Optional[str] = None,
573+
reask_messages: Optional[List[Dict]] = None,
574+
messages: Optional[List[Dict]] = None,
555575
num_reasks: Optional[int] = None,
556576
tracer: Optional[Tracer] = None,
557577
name: Optional[str] = None,
@@ -566,11 +586,24 @@ def from_string(
566586
instructions (str, optional): Instructions for chat models. Defaults to None.
567587
reask_prompt (str, optional): An alternative prompt to use during reasks. Defaults to None.
568588
reask_instructions (str, optional): Alternative instructions to use during reasks. Defaults to None.
589+
reask_messages (List[Dict], optional): A list of messages to use during reasks. Defaults to None.
569590
num_reasks (int, optional): The max times to re-ask the LLM if validation fails. Deprecated
570591
tracer (Tracer, optional): An OpenTelemetry tracer to use for metrics and traces. Defaults to None.
571592
name (str, optional): A unique name for this Guard. Defaults to `gr-` + the object id.
572593
description (str, optional): A description for this Guard. Defaults to None.
573594
""" # noqa
595+
if reask_instructions:
596+
warnings.warn(
597+
"reask_instructions is deprecated and will be removed in 0.6.x! "
598+
"Please be prepared to set reask_messages instead.",
599+
DeprecationWarning,
600+
)
601+
if reask_prompt:
602+
warnings.warn(
603+
"reask_prompt is deprecated and will be removed in 0.6.x! "
604+
"Please be prepared to set reask_messages instead.",
605+
DeprecationWarning,
606+
)
574607

575608
if num_reasks:
576609
warnings.warn(
@@ -594,6 +627,8 @@ def from_string(
594627
instructions=instructions,
595628
reask_prompt=reask_prompt,
596629
reask_instructions=reask_instructions,
630+
reask_messages=reask_messages,
631+
messages=messages,
597632
)
598633
guard = cast(
599634
Guard[str],
@@ -696,6 +731,10 @@ def __exec(
696731
"custom_reask_instructions",
697732
self._exec_opts.reask_instructions is not None,
698733
),
734+
(
735+
"custom_reask_messages",
736+
self._exec_opts.reask_messages is not None,
737+
),
699738
],
700739
is_parent=True, # It will have children
701740
has_parent=False, # Has no parents

guardrails/llm_providers.py

Lines changed: 38 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
cast,
1414
)
1515

16+
import warnings
17+
1618
from guardrails_api_client.models import LLMResource
1719
from pydantic import BaseModel
1820

@@ -141,6 +143,12 @@ def _invoke_llm(
141143
*args,
142144
**kwargs,
143145
) -> LLMResponse:
146+
warnings.warn(
147+
"This callable is deprecated in favor of passing "
148+
"no callable and the model argument which utilizes LiteLLM "
149+
"for example guard(model='gpt-4o', messages=[...], ...)",
150+
DeprecationWarning,
151+
)
144152
if "api_key" in kwargs:
145153
api_key = kwargs.pop("api_key")
146154
else:
@@ -199,6 +207,12 @@ def _invoke_llm(
199207
If `base_model` is passed, the chat engine will be used as a function
200208
on the base model.
201209
"""
210+
warnings.warn(
211+
"This callable is deprecated in favor of passing "
212+
"no callable and the model argument which utilizes LiteLLM"
213+
"for example guard(model='gpt-4.o', messages=[...], ...)",
214+
DeprecationWarning,
215+
)
202216
if msg_history is None and text is None:
203217
raise PromptCallableException(
204218
"You must pass in either `text` or `msg_history` to `guard.__call__`."
@@ -310,6 +324,12 @@ def _invoke_llm(
310324
)
311325
```
312326
""" # noqa
327+
warnings.warn(
328+
"This callable is deprecated in favor of passing "
329+
"no callable and the model argument which utilizes LiteLLM "
330+
"for example guard(model='command-r', messages=[...], ...)",
331+
DeprecationWarning,
332+
)
313333

314334
trace_input_messages = chat_prompt(prompt, kwargs.get("instructions"))
315335
if "instructions" in kwargs:
@@ -394,6 +414,12 @@ def _invoke_llm(
394414
...
395415
```
396416
"""
417+
warnings.warn(
418+
"This callable is deprecated in favor of passing "
419+
"no callable and the model argument which utilizes LiteLLM"
420+
"for example guard(model='claude-3-opus-20240229', messages=[...], ...)",
421+
DeprecationWarning,
422+
)
397423
try:
398424
import anthropic
399425
except ImportError:
@@ -925,6 +951,12 @@ async def invoke_llm(
925951
*args,
926952
**kwargs,
927953
):
954+
warnings.warn(
955+
"This callable is deprecated in favor of passing "
956+
"no callable and the model argument which utilizes LiteLLM "
957+
"for example guard(model='gpt-4o', messages=[...], ...)",
958+
DeprecationWarning,
959+
)
928960
if "api_key" in kwargs:
929961
api_key = kwargs.pop("api_key")
930962
else:
@@ -976,7 +1008,12 @@ async def invoke_llm(
9761008
If `base_model` is passed, the chat engine will be used as a function
9771009
on the base model.
9781010
"""
979-
1011+
warnings.warn(
1012+
"This callable is deprecated in favor of passing "
1013+
"no callable and the model argument which utilizes LiteLLM "
1014+
"for example guard(model='gpt-4o', messages=[...], ...)",
1015+
DeprecationWarning,
1016+
)
9801017
if msg_history is None and text is None:
9811018
raise PromptCallableException(
9821019
"You must pass in either `text` or `msg_history` to `guard.__call__`."

0 commit comments

Comments
 (0)