Skip to content

Commit 845c874

Browse files
committed
removed restrictions on MaxNumTokens in gpt-4-vision-preview
1 parent 553f12b commit 845c874

File tree

3 files changed

+3
-8
lines changed

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@
131131

132132
nvpOptions = keys(dict);
133133
if strcmp(nvp.ModelName,'gpt-4-vision-preview')
134-
nvpOptions(ismember(nvpOptions,["MaxNumTokens","StopSequences"])) = [];
134+
nvpOptions(ismember(nvpOptions,"StopSequences")) = [];
135135
end
136136

137137
for opt = nvpOptions.'

examples/ExampleGPT4Vision.mlx

148 KB
Binary file not shown.

openAIChat.m

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -209,8 +209,8 @@
209209
% reproducible responses
210210
%
211211
% Currently, GPT-4 Turbo with vision does not support the message.name
212-
% parameter, functions/tools, response_format parameter, stop
213-
% sequences, and max_tokens
212+
% parameter, functions/tools, response_format parameter, and stop
213+
% sequences. It also has a low MaxNumTokens default, which can be overridden.
214214

215215
arguments
216216
this (1,1) openAIChat
@@ -221,11 +221,6 @@
221221
nvp.Seed {mustBeIntegerOrEmpty(nvp.Seed)} = []
222222
end
223223

224-
if nvp.MaxNumTokens ~= Inf && strcmp(this.ModelName,'gpt-4-vision-preview')
225-
error("llms:invalidOptionForModel", ...
226-
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "MaxNumTokens", this.ModelName));
227-
end
228-
229224
toolChoice = convertToolChoice(this, nvp.ToolChoice);
230225
if ~isempty(nvp.ToolChoice) && strcmp(this.ModelName,'gpt-4-vision-preview')
231226
error("llms:invalidOptionForModel", ...

0 commit comments

Comments (0)