
Commit 8a75d29

Merge pull request #74 from matlab-deep-learning/main
Merge main into documentation.
2 parents (ada0ff8 + 150d9c1) · commit 8a75d29


92 files changed: 2447 additions, 386 deletions


+llms/+internal/callAzureChatAPI.m

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@
 
 parameters = buildParametersCall(messages, functions, nvp);
 
-[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
 
 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error

+llms/+internal/callOllamaChatAPI.m

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@
 
 parameters = buildParametersCall(model, messages, nvp);
 
-[response, streamedText] = llms.internal.sendRequest(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
 
 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@
 
 parameters = buildParametersCall(messages, functions, nvp);
 
-[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
 
 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error

+llms/+internal/sendRequestWrapper.m

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+function [response, streamedText] = sendRequestWrapper(varargin)
+% This function is undocumented and will change in a future release
+
+% A wrapper around sendRequest to have a test seam
+[response, streamedText] = llms.internal.sendRequest(varargin{:});
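
The three call sites above now route through this wrapper so tests can intercept the HTTP call. A minimal sketch of how such a test seam might be used (an assumption for illustration, not part of the commit): a test fixture places its own +llms/+internal/sendRequestWrapper.m earlier on the MATLAB path, shadowing this file and returning canned data instead of contacting a server. The response fields below are hypothetical.

function [response, streamedText] = sendRequestWrapper(varargin)
% Hypothetical test double: ignore the request and return a canned reply.
message = struct("content", "mocked reply");
response = struct("StatusCode", "OK", ...
    "Body", struct("Data", struct("choices", struct("message", message))));
streamedText = "mocked reply";
end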

+llms/+internal/textGenerator.m

Lines changed: 6 additions & 0 deletions
@@ -28,4 +28,10 @@
     properties (Access=protected)
         StreamFun
     end
+
+    methods
+        function hObj = set.StopSequences(hObj,value)
+            hObj.StopSequences = string(value);
+        end
+    end
 end
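
The effect of the new setter, sketched under the assumption that StopSequences is publicly settable on the chat objects that inherit from this class: any accepted text type is normalized to a string array on assignment.

% Assumes a configured API key; openAIChat is one of the textGenerator subclasses.
chat = openAIChat("You are a helpful assistant");
chat.StopSequences = {'###', 'STOP'};   % cell array of character vectors ...
class(chat.StopSequences)               % ... comes back as 'string' thanks to the new setter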

+llms/+stream/responseStreamer.m

Lines changed: 2 additions & 1 deletion
@@ -84,7 +84,8 @@
     end
     this.StreamFun('');
     this.ResponseText = txt;
-else
+elseif isfield(json.choices,"delta") && ...
+        isfield(json.choices.delta,"content")
     txt = json.choices.delta.content;
     this.StreamFun(txt);
     this.ResponseText = [this.ResponseText txt];
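
For context, a sketch of the two kinds of streamed chunks the new condition distinguishes (field layout assumed from the streaming format the streamer already parses): chunks without delta.content, such as role-only or finish chunks, are now skipped instead of falling into the old else branch.

withText = jsondecode('{"choices":[{"delta":{"content":"Hello"}}]}');
roleOnly = jsondecode('{"choices":[{"delta":{"role":"assistant"}}]}');
isfield(withText.choices.delta,"content")   % true  - text gets appended to ResponseText
isfield(roleOnly.choices.delta,"content")   % false - chunk is ignored by the new elseif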

+llms/+utils/errorMessageCatalog.m

Lines changed: 11 additions & 11 deletions
@@ -41,22 +41,22 @@
 catalog("llms:mustBeAssistantWithIdAndFunction") = "Field 'tool_call' must be a struct with fields 'id' and 'function'.";
 catalog("llms:mustBeAssistantWithNameAndArguments") = "Field 'function' must be a struct with fields 'name' and 'arguments'.";
 catalog("llms:assistantMustHaveTextNameAndArguments") = "Fields 'name' and 'arguments' must be text with one or more characters.";
-catalog("llms:mustBeValidIndex") = "Index exceeds the number of array elements. Index must be less than or equal to ({1}).";
-catalog("llms:stopSequencesMustHaveMax4Elements") = "Number of elements must not be larger than 4.";
+catalog("llms:mustBeValidIndex") = "Index exceeds the number of array elements. Index must be less than or equal to {1}.";
+catalog("llms:stopSequencesMustHaveMax4Elements") = "Number of stop sequences must be less than or equal to 4.";
 catalog("llms:endpointMustBeSpecified") = "Unable to find endpoint. Either set environment variable AZURE_OPENAI_ENDPOINT or specify name-value argument ""Endpoint"".";
 catalog("llms:deploymentMustBeSpecified") = "Unable to find deployment name. Either set environment variable AZURE_OPENAI_DEPLOYMENT or specify name-value argument ""Deployment"".";
 catalog("llms:keyMustBeSpecified") = "Unable to find API key. Either set environment variable {1} or specify name-value argument ""APIKey"".";
-catalog("llms:mustHaveMessages") = "Value must contain at least one message in Messages.";
+catalog("llms:mustHaveMessages") = "Message history must not be empty.";
 catalog("llms:mustSetFunctionsForCall") = "When no functions are defined, ToolChoice must not be specified.";
-catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or a messageHistory object.";
-catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for ModelName '{3}'";
-catalog("llms:invalidOptionForModel") = "{1} is not supported for ModelName '{2}'";
-catalog("llms:invalidContentTypeForModel") = "{1} is not supported for ModelName '{2}'";
-catalog("llms:functionNotAvailableForModel") = "This function is not supported for ModelName '{1}'";
-catalog("llms:promptLimitCharacter") = "Prompt must have a maximum length of {1} characters for ModelName '{2}'";
-catalog("llms:pngExpected") = "Argument must be a PNG image.";
+catalog("llms:mustBeMessagesOrTxt") = "Message must be nonempty string, character array, cell array of character vectors, or messageHistory object.";
+catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for model ""{3}"".";
+catalog("llms:invalidOptionForModel") = "Invalid argument name {1} for model ""{2}"".";
+catalog("llms:invalidContentTypeForModel") = "{1} is not supported for model ""{2}"".";
+catalog("llms:functionNotAvailableForModel") = "Image editing is not supported for model ""{1}"".";
+catalog("llms:promptLimitCharacter") = "Prompt must contain at most {1} characters for model ""{2}"".";
+catalog("llms:pngExpected") = "Image must be a PNG file (*.png).";
 catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message.";
-catalog("llms:apiReturnedError") = "Server error: ""{1}""";
+catalog("llms:apiReturnedError") = "Server returned error indicating: ""{1}""";
 catalog("llms:dimensionsMustBeSmallerThan") = "Dimensions must be less than or equal to {1}.";
 catalog("llms:stream:responseStreamer:InvalidInput") = "Input does not have the expected json format, got ""{1}"".";
 end
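
The numbered holes such as {1} and {2} are filled in when a message is retrieved. A sketch of assumed usage (the exact getMessage calling convention is not shown in this diff):

msg = llms.utils.errorMessageCatalog.getMessage("llms:promptLimitCharacter", "1000", "dall-e-2");
% msg reads roughly: Prompt must contain at most 1000 characters for model "dall-e-2".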

+llms/+utils/mustBeValidStop.m

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@ function mustBeValidStop(value)
 if ~isempty(value)
     mustBeVector(value);
     mustBeNonzeroLengthText(value);
+    value = string(value);
     % This restriction is set by the OpenAI API
     if numel(value)>4
         error("llms:stopSequencesMustHaveMax4Elements", llms.utils.errorMessageCatalog.getMessage("llms:stopSequencesMustHaveMax4Elements"));

.githooks/pre-commit

Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+cd $(git rev-parse --show-toplevel)
+pwd
+
+# For all commits of mlx files, create corresponding Markdown (md) files.
+# If the mlx files are in .../mlx-scripts/*.mlx, the corresponding
+# md files will go into .../*.md.
+#
+# This script assumes that the mlx files as currently in the file system
+# are what is being committed, instead of doing a lot of extra work to
+# get them from the stage area.
+#
+# Note that this script will not remove media files. If an mlx has
+# fewer plots at some point in the future, there will be file system
+# cruft. Which doesn't hurt the md display in GitHub or elswehere.
+changedMlxFiles=`git diff --cached --name-only --diff-filter=d '*.mlx'`
+
+if [ -n "$changedMlxFiles" ]; then
+    # Keep the line break here, we replace end-of-line with "' '" to get the quotes right
+    matlab -batch "for file = {'${changedMlxFiles//
+/' '}'}, export(file{1},replace(erase(file{1},'mlx-scripts'),'.mlx','.md')); end"
+    tmp=${changedMlxFiles//mlx-scripts\//}
+    mdFiles=${tmp//.mlx/.md}
+    for file in $mdFiles; do
+        if [ -d ${file%.md}_media ]; then
+            git add ${file%.md}_media/
+        fi
+        perl -pi -e "\$cnt++ if /^#/; " \
+            -e "\$_ .= \"\nTo run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/$(basename $file .md).mlx](mlx-scripts/$(basename $file .md).mlx) \n\" if /^#/ && \$cnt==1;" \
+            $file
+    done
+    git add $mdFiles
+fi
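
Note (an assumption about repository setup, not shown in this commit): hooks kept in a custom .githooks directory only run if each contributor points Git at that directory, for example with: git config core.hooksPath .githooks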

.github/CODEOWNERS

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+# Code owners, to get auto-filled reviewer lists
+
+# To start with, we just assume everyone in the core team is included on all reviews
+* @adulai @ccreutzi @debymf @MiriamScharnke @vpapanasta
