Commit d7b9697

refactor(spring-ai-openai): fix code style
Signed-off-by: SenreySong <[email protected]>
1 parent: 94bc79d · commit: d7b9697

3 files changed: +44 −43 lines changed

models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatModel.java

Lines changed: 36 additions & 37 deletions
@@ -220,7 +220,7 @@ public ChatResponse internalCall(Prompt prompt, ChatResponse previousChatRespons
 "index", choice.index() != null ? choice.index() : 0,
 "finishReason", getFinishReasonJson(choice.finishReason()),
 "refusal", StringUtils.hasText(choice.message().refusal()) ? choice.message().refusal() : "",
-"annotations", choice.message().annotations() != null? choice.message().annotations() : List.of(Map.of()));
+"annotations", choice.message().annotations() != null ? choice.message().annotations() : List.of(Map.of()));
 return buildGeneration(choice, metadata, request);
 }).toList();
 // @formatter:on
@@ -306,23 +306,22 @@ public Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse previousCha
 Flux<ChatResponse> chatResponse = completionChunks.map(this::chunkToChatCompletion)
 .switchMap(chatCompletion -> Mono.just(chatCompletion).map(chatCompletion2 -> {
 try {
-
 // If an id is not provided, set to "NO_ID" (for compatible APIs).
 String id = chatCompletion2.id() == null ? "NO_ID" : chatCompletion2.id();

 List<Generation> generations = chatCompletion2.choices().stream().map(choice -> { // @formatter:off
-if (choice.message().role() != null) {
-roleMap.putIfAbsent(id, choice.message().role().name());
-}
-Map<String, Object> metadata = Map.of(
-"id", id,
-"role", roleMap.getOrDefault(id, ""),
-"index", choice.index() != null ? choice.index() : 0,
-"finishReason", getFinishReasonJson(choice.finishReason()),
-"refusal", StringUtils.hasText(choice.message().refusal()) ? choice.message().refusal() : "",
-"annotations", choice.message().annotations() != null ? choice.message().annotations() : List.of(),
-"reasoningContent", choice.message().reasoningContent() != null ? choice.message().reasoningContent() : "");
-return buildGeneration(choice, metadata, request);
+if (choice.message().role() != null) {
+roleMap.putIfAbsent(id, choice.message().role().name());
+}
+Map<String, Object> metadata = Map.of(
+"id", id,
+"role", roleMap.getOrDefault(id, ""),
+"index", choice.index() != null ? choice.index() : 0,
+"finishReason", getFinishReasonJson(choice.finishReason()),
+"refusal", StringUtils.hasText(choice.message().refusal()) ? choice.message().refusal() : "",
+"annotations", choice.message().annotations() != null ? choice.message().annotations() : List.of(),
+"reasoningContent", choice.message().reasoningContent() != null ? choice.message().reasoningContent() : "");
+return buildGeneration(choice, metadata, request);
 }).toList();
 // @formatter:on
 OpenAiApi.Usage usage = chatCompletion2.usage();
@@ -366,37 +365,37 @@ public Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse previousCha
 // @formatter:off
 Flux<ChatResponse> flux = chatResponse.flatMap(response -> {
 if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(prompt.getOptions(), response)) {
-// FIXME: bounded elastic needs to be used since
-// tool calling
-// is currently only synchronous
+// FIXME: bounded elastic needs to be used since tool calling
+// is currently only synchronous
 return Flux.deferContextual(ctx -> {
-ToolExecutionResult toolExecutionResult;
-try {
-ToolCallReactiveContextHolder.setContext(ctx);
-toolExecutionResult = this.toolCallingManager.executeToolCalls(prompt, response);
-}
-finally {
-ToolCallReactiveContextHolder.clearContext();
-}
-if (toolExecutionResult.returnDirect()) {
-// Return tool execution result directly to the client.
-return Flux.just(ChatResponse.builder().from(response)
+ToolExecutionResult toolExecutionResult;
+try {
+ToolCallReactiveContextHolder.setContext(ctx);
+toolExecutionResult = this.toolCallingManager.executeToolCalls(prompt, response);
+}
+finally {
+ToolCallReactiveContextHolder.clearContext();
+}
+if (toolExecutionResult.returnDirect()) {
+// Return tool execution result directly to the client.
+return Flux.just(ChatResponse.builder().from(response)
 .generations(ToolExecutionResult.buildGenerations(toolExecutionResult))
-.build());
-} else {
-// Send the tool execution result back to the model.
-return this.internalStream(new Prompt(toolExecutionResult.conversationHistory(), prompt.getOptions()),
-response);
-}
+.build());
+} else {
+// Send the tool execution result back to the model.
+return this.internalStream(new Prompt(toolExecutionResult.conversationHistory(), prompt.getOptions()),
+response);
+}
 }).subscribeOn(Schedulers.boundedElastic());
-} else {
-return Flux.just(response);
+}
+else {
+return Flux.just(response);
 }
 })
 .doOnError(observation::error)
 .doFinally(s -> observation.stop())
 .contextWrite(ctx -> ctx.put(ObservationThreadLocalAccessor.KEY, observation));
-// @formatter:on
+// @formatter:on

 return new MessageAggregator().aggregate(flux, observationContext::setResponse);

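As an aside on the last hunk: the reformatted block keeps the existing pattern of running the (currently synchronous) tool-call execution on Reactor's boundedElastic scheduler so the reactive pipeline is not blocked. Below is a minimal, self-contained sketch of that pattern only, under stated assumptions: BlockingToolSketch and executeToolCallsBlocking are hypothetical placeholders, not Spring AI APIs.

// Minimal sketch of the defer + boundedElastic pattern seen in the diff above.
// BlockingToolSketch and executeToolCallsBlocking are hypothetical placeholders.
import reactor.core.publisher.Flux;
import reactor.core.scheduler.Schedulers;

public class BlockingToolSketch {

    // Stand-in for a blocking call, e.g. toolCallingManager.executeToolCalls(...)
    private static String executeToolCallsBlocking(String request) {
        return "tool-result-for-" + request;
    }

    static Flux<String> handle(String request, boolean toolExecutionRequired) {
        if (toolExecutionRequired) {
            // Defer the blocking work and subscribe it on boundedElastic so the
            // calling event-loop thread never blocks.
            return Flux.defer(() -> Flux.just(executeToolCallsBlocking(request)))
                .subscribeOn(Schedulers.boundedElastic());
        }
        return Flux.just(request);
    }

    public static void main(String[] args) {
        handle("prompt", true).doOnNext(System.out::println).blockLast();
    }
}
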
models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java

Lines changed: 1 addition & 0 deletions
@@ -137,6 +137,7 @@ public class OpenAiChatOptions implements ToolCallingChatOptions {
 * modalities: ["audio"]
 * Note: that the audio modality is only available for the gpt-4o-audio-preview model
 * and is not supported for streaming completions.
+*
 */
 private @JsonProperty("audio") AudioParameters outputAudio;

models/spring-ai-openai/src/main/java/org/springframework/ai/openai/api/OpenAiApi.java

Lines changed: 7 additions & 6 deletions
@@ -263,9 +263,9 @@ public Flux<ChatCompletionChunk> chatCompletionStream(ChatCompletionRequest chat
 }
 Object dynamicBody = createDynamicRequestBody(chatRequest);
 // @formatter:off
-return this.webClient.post()
-.uri(this.completionsPath)
-.headers(headers -> {
+return this.webClient.post()
+.uri(this.completionsPath)
+.headers(headers -> {
 headers.addAll(additionalHttpHeader);
 addDefaultHeadersIfMissing(headers);
 }) // @formatter:on
@@ -1123,7 +1123,7 @@ public enum OutputModality {
 * @param verbosity Controls the verbosity of the model's response.
 */
 @JsonInclude(Include.NON_NULL)
-public record ChatCompletionRequest( // @formatter:off
+public record ChatCompletionRequest(// @formatter:off
 @JsonProperty("messages") List<ChatCompletionMessage> messages,
 @JsonProperty("model") String model,
 @JsonProperty("store") Boolean store,
@@ -1440,7 +1440,7 @@ public String getValue() {
 */
 @JsonInclude(Include.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
-public record ChatCompletionMessage( // @formatter:off
+public record ChatCompletionMessage(// @formatter:off
 @JsonProperty("content") Object rawContent,
 @JsonProperty("role") Role role,
 @JsonProperty("name") String name,
@@ -1450,7 +1450,8 @@ public record ChatCompletionMessage( // @formatter:off
 @JsonProperty("refusal") String refusal,
 @JsonProperty("audio") AudioOutput audioOutput,
 @JsonProperty("annotations") List<Annotation> annotations,
-@JsonProperty("reasoning_content") String reasoningContent) { // @formatter:on
+@JsonProperty("reasoning_content") String reasoningContent
+) { // @formatter:on

 /**
 * Create a chat completion message with the given content and role. All other
