
.Net: Add functionality to create and import plugins with YAML functions #10840


Merged
@@ -32,7 +32,7 @@ public void ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings()
{
// Arrange
// Act
var function = KernelFunctionYaml.FromPromptYaml(this._yamlNoExecutionSettings);
var function = KernelFunctionYaml.FromPromptYaml(YAMLNoExecutionSettings);

// Assert
Assert.NotNull(function);
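
The fixture here defines a `SayHello` prompt with two input variables and no execution settings. For context, a minimal sketch of how a function created this way could be wired into a kernel and invoked; the connector setup, model id, API key, and `yamlText` placeholder are illustrative and not part of this change:

```csharp
// Sketch only: the model id, API key, and yamlText placeholder are not part of this diff.
using Microsoft.SemanticKernel;

string yamlText = "...";   // e.g. the YAMLNoExecutionSettings fixture defined later in this file

Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(modelId: "gpt-4", apiKey: "<api-key>")
    .Build();

KernelFunction sayHello = KernelFunctionYaml.FromPromptYaml(yamlText);

FunctionResult result = await kernel.InvokeAsync(sayHello, new KernelArguments
{
    ["name"] = "Alice",
    ["language"] = "French",
});
// result.GetValue<string>() would hold the greeting returned by the model.
```
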
@@ -47,7 +47,7 @@ public void ItShouldCreateFunctionFromPromptYaml()
{
// Arrange
// Act
var function = KernelFunctionYaml.FromPromptYaml(this._yaml);
var function = KernelFunctionYaml.FromPromptYaml(YAML);

// Assert
Assert.NotNull(function);
@@ -60,7 +60,7 @@ public void ItShouldCreateFunctionFromPromptYamlWithCustomExecutionSettings()
{
// Arrange
// Act
var function = KernelFunctionYaml.FromPromptYaml(this._yamlWithCustomSettings);
var function = KernelFunctionYaml.FromPromptYaml(YAMLWithCustomSettings);

// Assert
Assert.NotNull(function);
@@ -77,7 +77,7 @@ public void ItShouldSupportCreatingOpenAIExecutionSettings()
.WithNamingConvention(UnderscoredNamingConvention.Instance)
.WithTypeConverter(new PromptExecutionSettingsTypeConverter())
.Build();
var promptFunctionModel = deserializer.Deserialize<PromptTemplateConfig>(this._yaml);
var promptFunctionModel = deserializer.Deserialize<PromptTemplateConfig>(YAML);

// Act
var executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(promptFunctionModel.ExecutionSettings["service1"]);
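
The conversion exercised here can be reduced to the following sketch: a generic `PromptExecutionSettings`, whose extension data uses the underscored YAML keys, is mapped onto the strongly typed OpenAI settings. The values mirror the `service1` entry in the fixture; this is illustrative rather than code from the diff:

```csharp
// Sketch: FromExecutionSettings maps YAML-sourced generic settings onto OpenAI-specific ones.
using System.Collections.Generic;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;

var generic = new PromptExecutionSettings
{
    ModelId = "gpt-4",
    ExtensionData = new Dictionary<string, object>
    {
        ["temperature"] = 1.0,
        ["top_p"] = 0.0,
        ["max_tokens"] = 256,
    },
};

OpenAIPromptExecutionSettings typed =
    OpenAIPromptExecutionSettings.FromExecutionSettings(generic);
// typed.Temperature, typed.TopP, and typed.MaxTokens now carry the values from the extension data.
```
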
@@ -93,7 +93,7 @@ public void ItShouldSupportCreatingOpenAIExecutionSettings()
public void ItShouldDeserializeAutoFunctionChoiceBehaviors()
{
// Act
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml);
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(YAML);

// Assert
Assert.NotNull(promptTemplateConfig?.ExecutionSettings);
@@ -114,7 +114,7 @@ public void ItShouldDeserializeAutoFunctionChoiceBehaviors()
public void ItShouldDeserializeRequiredFunctionChoiceBehaviors()
{
// Act
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml);
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(YAML);

// Assert
Assert.NotNull(promptTemplateConfig?.ExecutionSettings);
@@ -135,7 +135,7 @@ public void ItShouldDeserializeRequiredFunctionChoiceBehaviors()
public void ItShouldDeserializeNoneFunctionChoiceBehaviors()
{
// Act
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml);
var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(YAML);

// Assert
Assert.NotNull(promptTemplateConfig?.ExecutionSettings);
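
These three tests cover the `auto`, `required`, and `none` variants of `function_choice_behavior` declared for `service1`, `service2`, and `service3`. Roughly, the truncated assertions boil down to checks like the following sketch, which assumes the usings and the `YAML` constant from this file (the real tests may also assert the declared function lists):

```csharp
// Sketch: each service's deserialized settings expose the behavior declared in the YAML.
var config = KernelFunctionYaml.ToPromptTemplateConfig(YAML);

Assert.IsType<AutoFunctionChoiceBehavior>(config.ExecutionSettings["service1"].FunctionChoiceBehavior);
Assert.IsType<RequiredFunctionChoiceBehavior>(config.ExecutionSettings["service2"].FunctionChoiceBehavior);
Assert.IsType<NoneFunctionChoiceBehavior>(config.ExecutionSettings["service3"].FunctionChoiceBehavior);
```
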
@@ -156,7 +156,7 @@ public void ItShouldDeserializeNoneFunctionChoiceBehaviors()
public void ItShouldCreateFunctionWithDefaultValueOfStringType()
{
// Act
var function = KernelFunctionYaml.FromPromptYaml(this._yamlWithCustomSettings);
var function = KernelFunctionYaml.FromPromptYaml(YAMLWithCustomSettings);

// Assert
Assert.NotNull(function?.Metadata?.Parameters);
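
The truncated assertions check the string default declared for the `name` input variable. A sketch of the relevant metadata lookup, assuming the `YAMLWithCustomSettings` fixture and `System.Linq`, rather than the exact test body:

```csharp
// Sketch: the "name" input variable declares `default: John`, so its metadata
// should surface a string default value.
var function = KernelFunctionYaml.FromPromptYaml(YAMLWithCustomSettings);
var nameParameter = function.Metadata.Parameters.First(p => p.Name == "name");

Assert.Equal("John", nameParameter.DefaultValue);
```
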
@@ -192,98 +192,98 @@ string CreateYaml(object defaultValue)
Assert.Throws<NotSupportedException>(() => KernelFunctionYaml.FromPromptYaml(CreateYaml(new { p1 = "v1" })));
}

private readonly string _yamlNoExecutionSettings = @"
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
";

private readonly string _yaml = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
execution_settings:
service1:
model_id: gpt-4
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: []
function_choice_behavior:
type: auto
functions:
- p1.f1
service2:
model_id: gpt-3.5
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: [ "foo", "bar", "baz" ]
function_choice_behavior:
type: required
functions:
- p2.f2
service3:
model_id: gpt-3.5
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: [ "foo", "bar", "baz" ]
function_choice_behavior:
type: none
functions:
- p3.f3
""";

private readonly string _yamlWithCustomSettings = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
execution_settings:
service1:
model_id: gpt-4
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: []
service2:
model_id: random-model
temperaturex: 1.0
top_q: 0.0
rando_penalty: 0.0
max_token_count: 256
stop_sequences: [ "foo", "bar", "baz" ]
""";
private const string YAMLNoExecutionSettings = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
""";

private const string YAML = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
execution_settings:
service1:
model_id: gpt-4
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: []
function_choice_behavior:
type: auto
functions:
- p1.f1
service2:
model_id: gpt-3.5
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: [ "foo", "bar", "baz" ]
function_choice_behavior:
type: required
functions:
- p2.f2
service3:
model_id: gpt-3.5
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: [ "foo", "bar", "baz" ]
function_choice_behavior:
type: none
functions:
- p3.f3
""";

private const string YAMLWithCustomSettings = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
description: Say hello to the specified person using the specified language
name: SayHello
input_variables:
- name: name
description: The name of the person to greet
default: John
- name: language
description: The language to generate the greeting in
default: English
execution_settings:
service1:
model_id: gpt-4
temperature: 1.0
top_p: 0.0
presence_penalty: 0.0
frequency_penalty: 0.0
max_tokens: 256
stop_sequences: []
service2:
model_id: random-model
temperaturex: 1.0
top_q: 0.0
rando_penalty: 0.0
max_token_count: 256
stop_sequences: [ "foo", "bar", "baz" ]
""";
}
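
The fixture change above turns the per-instance `readonly` strings into compile-time `const` strings and converts the first fixture from a verbatim string to a raw string literal, matching the other two. On the PR's broader theme of creating and importing plugins from YAML functions, a hedged sketch of that flow; the `Prompts` folder and plugin name are hypothetical, while `KernelFunctionYaml.FromPromptYaml` and `ImportPluginFromFunctions` are existing Semantic Kernel APIs:

```csharp
// Sketch: build KernelFunctions from a folder of prompt YAML files and expose them as one plugin.
using System.IO;
using System.Linq;
using Microsoft.SemanticKernel;

Kernel kernel = Kernel.CreateBuilder().Build();

var functions = Directory
    .EnumerateFiles("Prompts", "*.yaml")            // hypothetical folder of prompt YAML files
    .Select(path => KernelFunctionYaml.FromPromptYaml(File.ReadAllText(path)))
    .ToList();

KernelPlugin plugin = kernel.ImportPluginFromFunctions("YamlPrompts", functions);
// kernel.InvokeAsync(plugin["SayHello"], new KernelArguments { ... }) would then
// invoke one of the imported functions by name.
```
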