8 files changed, +15 -15 lines changed.

@@ -99,7 +99,7 @@ def invoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
@@ -132,7 +132,7 @@ async def ainvoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
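The same one-word docstring fix repeats across every LLM backend below; the parameter it documents is the per-call system-message override on invoke()/ainvoke(). As a quick illustration of what the docstring describes, here is a minimal usage sketch. Only the invoke() signature (input, message_history, system_instruction) and the LLMResponse return type are taken from this diff; the import path, class name, and constructor arguments are assumptions for illustration.

# Hedged sketch: import path, class name, and constructor kwargs are assumptions.
from neo4j_graphrag.llm import OpenAILLM  # assumed import path

llm = OpenAILLM(model_name="gpt-4o")  # assumed constructor

message_history = [
    {"role": "user", "content": "Who won the championship last year?"},
    {"role": "assistant", "content": "Team A won it."},
]

# system_instruction overrides the llm system message for this invocation only.
res = llm.invoke(
    "What about next season?",
    message_history,
    system_instruction="Answer in one short sentence.",
)
print(res.content)  # LLMResponse carries the model output in .content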

@@ -53,7 +53,7 @@ def invoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
@@ -74,7 +74,7 @@ async def ainvoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.

@@ -104,7 +104,7 @@ def invoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
@@ -132,7 +132,7 @@ async def ainvoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.

@@ -95,7 +95,7 @@ def invoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from MistralAI.
@@ -131,7 +131,7 @@ async def ainvoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from MistralAI.

@@ -86,7 +86,7 @@ def invoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
@@ -114,7 +114,7 @@ async def ainvoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from OpenAI.

@@ -95,7 +95,7 @@ def invoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from OpenAI.
@@ -126,7 +126,7 @@ async def ainvoke(
        Args:
            input (str): Text sent to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from OpenAI.

@@ -114,7 +114,7 @@ def invoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.
@@ -143,7 +143,7 @@ async def ainvoke(
        Args:
            input (str): The text to send to the LLM.
            message_history (Optional[list]): A collection previous messages, with each message having a specific role assigned.
-           system_instruction (Optional[str]): An option to override the llm system message for this invokation.
+           system_instruction (Optional[str]): An option to override the llm system message for this invocation.

        Returns:
            LLMResponse: The response from the LLM.

@@ -96,7 +96,7 @@ def test_mistralai_llm_invoke_with_message_history_and_system_instruction(
    ]
    question = "What about next season?"

-   # first invokation - initial instructions
+   # first invocation - initial instructions
    res = llm.invoke(question, message_history, system_instruction=system_instruction)  # type: ignore
    assert isinstance(res, LLMResponse)
    assert res.content == "mistral response"
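The ainvoke() hunks above apply the same wording fix on the async path, which shares the invoke() signature. A minimal async sketch follows, mirroring the MistralAI test above; the import path, class name, and constructor arguments are assumptions, not taken from this diff.

# Hedged sketch of the async path; only ainvoke()'s parameters and the
# LLMResponse return type come from the docstrings in this diff.
import asyncio

from neo4j_graphrag.llm import MistralAILLM  # assumed import path


async def main() -> None:
    llm = MistralAILLM(model_name="mistral-small-latest")  # assumed constructor
    # The system_instruction override applies to this single invocation only.
    res = await llm.ainvoke(
        "What about next season?",
        system_instruction="Reply as a sports commentator.",
    )
    print(res.content)


asyncio.run(main())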