From a3d869b3199a94bb9017bd3e6658c318a42a7006 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Mon, 24 Mar 2025 14:12:35 -0700 Subject: [PATCH 01/22] Define HybridParams --- packages/vertexai/src/types/ai.ts | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 packages/vertexai/src/types/ai.ts diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts new file mode 100644 index 00000000000..30b20863373 --- /dev/null +++ b/packages/vertexai/src/types/ai.ts @@ -0,0 +1,21 @@ +/** + * Shims @types/dom-chromium-ai + * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. + */ +interface AILanguageModelCreateOptions { + topK?: number; + temperature?: number; +} + +export interface AILanguageModelCreateOptionsWithSystemPrompt + extends AILanguageModelCreateOptions { + systemPrompt?: string; + initialPrompts?: AILanguageModelPrompt[]; +} + +type AILanguageModelPromptRole = 'user' | 'assistant'; + +interface AILanguageModelPrompt { + role: AILanguageModelPromptRole; + content: string; +} From 14eee1652585d3a00d667a9ade691f889615693c Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Mon, 24 Mar 2025 16:25:35 -0700 Subject: [PATCH 02/22] Copy over most types from @types package --- packages/vertexai/src/types/ai.ts | 378 +++++++++++++++++++++++++++++- 1 file changed, 369 insertions(+), 9 deletions(-) diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts index 30b20863373..86ce44b4d98 100644 --- a/packages/vertexai/src/types/ai.ts +++ b/packages/vertexai/src/types/ai.ts @@ -2,20 +2,380 @@ * Shims @types/dom-chromium-ai * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. */ +interface AI { + readonly languageModel: AILanguageModelFactory; + readonly summarizer: AISummarizerFactory; + readonly writer: AIWriterFactory; + readonly rewriter: AIRewriterFactory; + readonly translator: AITranslatorFactory; + readonly languageDetector: AILanguageDetectorFactory; +} + +interface AICreateMonitor extends EventTarget { + ondownloadprogress: ((this: AICreateMonitor, ev: DownloadProgressEvent) => any) | null; + + addEventListener( + type: K, + listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, + options?: boolean | AddEventListenerOptions, + ): void; + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions, + ): void; + removeEventListener( + type: K, + listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, + options?: boolean | EventListenerOptions, + ): void; + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions, + ): void; +} + +interface DownloadProgressEvent extends Event { + readonly loaded: number; + readonly total: number; +} + +interface AICreateMonitorEventMap { + downloadprogress: DownloadProgressEvent; +} + +type AICreateMonitorCallback = (monitor: AICreateMonitor) => void; + +type AICapabilityAvailability = "readily" | "after-download" | "no"; + +// Language Model +// https://github.com/explainers-by-googlers/prompt-api/#full-api-surface-in-web-idl + +interface AILanguageModelFactory { + create( + options?: AILanguageModelCreateOptionsWithSystemPrompt | AILanguageModelCreateOptionsWithoutSystemPrompt, + ): Promise; + capabilities(): Promise; +} + interface AILanguageModelCreateOptions { - topK?: number; - temperature?: number; + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + topK?: 
number; + temperature?: number; } -export interface AILanguageModelCreateOptionsWithSystemPrompt - extends AILanguageModelCreateOptions { - systemPrompt?: string; - initialPrompts?: AILanguageModelPrompt[]; +export interface AILanguageModelCreateOptionsWithSystemPrompt extends AILanguageModelCreateOptions { + systemPrompt?: string; + initialPrompts?: AILanguageModelPrompt[]; } -type AILanguageModelPromptRole = 'user' | 'assistant'; +interface AILanguageModelCreateOptionsWithoutSystemPrompt extends AILanguageModelCreateOptions { + systemPrompt?: never; + initialPrompts?: + | [AILanguageModelSystemPrompt, ...AILanguageModelPrompt[]] + | AILanguageModelPrompt[]; +} + +type AILanguageModelPromptRole = "user" | "assistant"; +type AILanguageModelInitialPromptRole = "system" | AILanguageModelPromptRole; interface AILanguageModelPrompt { - role: AILanguageModelPromptRole; - content: string; + role: AILanguageModelPromptRole; + content: string; +} + +interface AILanguageModelInitialPrompt { + role: AILanguageModelInitialPromptRole; + content: string; +} + +interface AILanguageModelSystemPrompt extends AILanguageModelInitialPrompt { + role: "system"; +} + +type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[]; + +interface AILanguageModel extends EventTarget { + prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; + promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream; + + countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; + readonly maxTokens: number; + readonly tokensSoFar: number; + readonly tokensLeft: number; + + readonly topK: number; + readonly temperature: number; + + oncontextoverflow: ((this: AILanguageModel, ev: Event) => any) | null; + + addEventListener( + type: K, + listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, + options?: boolean | AddEventListenerOptions, + ): void; + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions, + ): void; + removeEventListener( + type: K, + listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, + options?: boolean | EventListenerOptions, + ): void; + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions, + ): void; + + clone(options?: AILanguageModelCloneOptions): Promise; + destroy(): void; +} + +interface AILanguageModelEventMap { + contextoverflow: Event; +} + +interface AILanguageModelPromptOptions { + signal?: AbortSignal; +} + +interface AILanguageModelCloneOptions { + signal?: AbortSignal; +} + +interface AILanguageModelCapabilities { + readonly available: AICapabilityAvailability; + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; + + readonly defaultTopK: number | null; + readonly maxTopK: number | null; + readonly defaultTemperature: number | null; + readonly maxTemperature: number | null; +} + +// Summarizer +// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl + +interface AISummarizerFactory { + create(options?: AISummarizerCreateOptions): Promise; + capabilities(): Promise; +} + +interface AISummarizerCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + sharedContext?: string; + type?: AISummarizerType; + format?: AISummarizerFormat; + length?: 
AISummarizerLength; +} + +type AISummarizerType = "tl;dr" | "key-points" | "teaser" | "headline"; +type AISummarizerFormat = "plain-text" | "markdown"; +type AISummarizerLength = "short" | "medium" | "long"; + +interface AISummarizer { + summarize(input: string, options?: AISummarizerSummarizeOptions): Promise; + summarizeStreaming(input: string, options?: AISummarizerSummarizeOptions): ReadableStream; + + readonly sharedContext: string; + readonly type: AISummarizerType; + readonly format: AISummarizerFormat; + readonly length: AISummarizerLength; + + destroy(): void; +} + +interface AISummarizerSummarizeOptions { + signal?: AbortSignal; + context?: string; +} + +interface AISummarizerCapabilities { + readonly available: AICapabilityAvailability; + + supportsType(type: AISummarizerType): AICapabilityAvailability; + supportsFormat(format: AISummarizerFormat): AICapabilityAvailability; + supportsLength(length: AISummarizerLength): AICapabilityAvailability; + + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; +} + +// Writer +// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl + +interface AIWriterFactory { + create(options?: AIWriterCreateOptions): Promise; + capabilities(): Promise; +} + +interface AIWriterCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + sharedContext?: string; + tone?: AIWriterTone; + format?: AIWriterFormat; + length?: AIWriterLength; +} + +type AIWriterTone = "formal" | "neutral" | "casual"; +type AIWriterFormat = "plain-text" | "markdown"; +type AIWriterLength = "short" | "medium" | "long"; + +interface AIWriter { + write(writingTask: string, options?: AIWriterWriteOptions): Promise; + writeStreaming(writingTask: string, options?: AIWriterWriteOptions): ReadableStream; + + readonly sharedContext: string; + readonly tone: AIWriterTone; + readonly format: AIWriterFormat; + readonly length: AIWriterLength; + + destroy(): void; +} + +interface AIWriterWriteOptions { + signal?: AbortSignal; + context?: string; +} + +interface AIWriterCapabilities { + readonly available: AICapabilityAvailability; + + supportsTone(tone: AIWriterTone): AICapabilityAvailability; + supportsFormat(format: AIWriterFormat): AICapabilityAvailability; + supportsLength(length: AIWriterLength): AICapabilityAvailability; + + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; +} + +// Rewriter +// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl + +interface AIRewriterFactory { + create(options?: AIRewriterCreateOptions): Promise; + capabilities(): Promise; +} + +interface AIRewriterCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + sharedContext?: string; + tone?: AIRewriterTone; + format?: AIRewriterFormat; + length?: AIRewriterLength; +} + +type AIRewriterTone = "as-is" | "more-formal" | "more-casual"; +type AIRewriterFormat = "as-is" | "plain-text" | "markdown"; +type AIRewriterLength = "as-is" | "shorter" | "longer"; + +interface AIRewriter { + rewrite(input: string, options?: AIRewriterRewriteOptions): Promise; + rewriteStreaming(input: string, options?: AIRewriterRewriteOptions): ReadableStream; + + readonly sharedContext: string; + readonly tone: AIRewriterTone; + readonly format: AIRewriterFormat; + readonly length: AIRewriterLength; + + destroy(): void; +} + +interface AIRewriterRewriteOptions { + signal?: AbortSignal; + context?: string; +} + +interface 
AIRewriterCapabilities { + readonly available: AICapabilityAvailability; + + supportsTone(tone: AIRewriterTone): AICapabilityAvailability; + supportsFormat(format: AIRewriterFormat): AICapabilityAvailability; + supportsLength(length: AIRewriterLength): AICapabilityAvailability; + + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; } + +// Translator +// https://github.com/WICG/translation-api?tab=readme-ov-file#full-api-surface-in-web-idl + +interface AITranslatorFactory { + create(options: AITranslatorCreateOptions): Promise; + capabilities(): Promise; +} + +interface AITranslator { + translate(input: string, options?: AITranslatorTranslateOptions): Promise; + translateStreaming(input: string, options?: AITranslatorTranslateOptions): ReadableStream; + + readonly sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier; + readonly targetLanguage: Intl.UnicodeBCP47LocaleIdentifier; + + destroy(): void; +} + +interface AITranslatorCapabilities { + readonly available: AICapabilityAvailability; + + languagePairAvailable( + sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier, + targetLanguage: Intl.UnicodeBCP47LocaleIdentifier, + ): AICapabilityAvailability; +} + +interface AITranslatorCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier; + targetLanguage: Intl.UnicodeBCP47LocaleIdentifier; +} + +interface AITranslatorTranslateOptions { + signal?: AbortSignal; +} + +// Language detector +// https://github.com/WICG/translation-api?tab=readme-ov-file#full-api-surface-in-web-idl + +interface AILanguageDetectorFactory { + create(options?: AILanguageDetectorCreateOptions): Promise; + capabilities(): Promise; +} + +interface AILanguageDetector { + detect(input: string, options?: AILanguageDetectorDetectOptions): Promise; + + destroy(): void; +} + +interface AILanguageDetectorCapabilities { + readonly available: AICapabilityAvailability; + + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; +} + +interface AILanguageDetectorCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; +} + +interface AILanguageDetectorDetectOptions { + signal?: AbortSignal; +} + +interface LanguageDetectionResult { + /** null represents unknown language */ + detectedLanguage: Intl.UnicodeBCP47LocaleIdentifier | null; + confidence: number; +} \ No newline at end of file From b242749be8ba12f720df4bcc24ef932ae57a007c Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 25 Mar 2025 09:08:29 -0700 Subject: [PATCH 03/22] Trim unused AI types --- packages/vertexai/src/types/ai.ts | 322 +----------------------------- 1 file changed, 3 insertions(+), 319 deletions(-) diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts index 86ce44b4d98..10cce1cab9b 100644 --- a/packages/vertexai/src/types/ai.ts +++ b/packages/vertexai/src/types/ai.ts @@ -2,20 +2,13 @@ * Shims @types/dom-chromium-ai * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. 
*/ -interface AI { - readonly languageModel: AILanguageModelFactory; - readonly summarizer: AISummarizerFactory; - readonly writer: AIWriterFactory; - readonly rewriter: AIRewriterFactory; - readonly translator: AITranslatorFactory; - readonly languageDetector: AILanguageDetectorFactory; -} - interface AICreateMonitor extends EventTarget { + // eslint-disable-next-line @typescript-eslint/no-explicit-any ondownloadprogress: ((this: AICreateMonitor, ev: DownloadProgressEvent) => any) | null; addEventListener( type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, options?: boolean | AddEventListenerOptions, ): void; @@ -26,6 +19,7 @@ interface AICreateMonitor extends EventTarget { ): void; removeEventListener( type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, options?: boolean | EventListenerOptions, ): void; @@ -47,18 +41,9 @@ interface AICreateMonitorEventMap { type AICreateMonitorCallback = (monitor: AICreateMonitor) => void; -type AICapabilityAvailability = "readily" | "after-download" | "no"; - // Language Model // https://github.com/explainers-by-googlers/prompt-api/#full-api-surface-in-web-idl -interface AILanguageModelFactory { - create( - options?: AILanguageModelCreateOptionsWithSystemPrompt | AILanguageModelCreateOptionsWithoutSystemPrompt, - ): Promise; - capabilities(): Promise; -} - interface AILanguageModelCreateOptions { signal?: AbortSignal; monitor?: AICreateMonitorCallback; @@ -72,310 +57,9 @@ export interface AILanguageModelCreateOptionsWithSystemPrompt extends AILanguage initialPrompts?: AILanguageModelPrompt[]; } -interface AILanguageModelCreateOptionsWithoutSystemPrompt extends AILanguageModelCreateOptions { - systemPrompt?: never; - initialPrompts?: - | [AILanguageModelSystemPrompt, ...AILanguageModelPrompt[]] - | AILanguageModelPrompt[]; -} - type AILanguageModelPromptRole = "user" | "assistant"; -type AILanguageModelInitialPromptRole = "system" | AILanguageModelPromptRole; interface AILanguageModelPrompt { role: AILanguageModelPromptRole; content: string; } - -interface AILanguageModelInitialPrompt { - role: AILanguageModelInitialPromptRole; - content: string; -} - -interface AILanguageModelSystemPrompt extends AILanguageModelInitialPrompt { - role: "system"; -} - -type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[]; - -interface AILanguageModel extends EventTarget { - prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; - promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream; - - countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; - readonly maxTokens: number; - readonly tokensSoFar: number; - readonly tokensLeft: number; - - readonly topK: number; - readonly temperature: number; - - oncontextoverflow: ((this: AILanguageModel, ev: Event) => any) | null; - - addEventListener( - type: K, - listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, - options?: boolean | AddEventListenerOptions, - ): void; - addEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | AddEventListenerOptions, - ): void; - removeEventListener( - type: K, - listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, - options?: boolean | 
EventListenerOptions, - ): void; - removeEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | EventListenerOptions, - ): void; - - clone(options?: AILanguageModelCloneOptions): Promise; - destroy(): void; -} - -interface AILanguageModelEventMap { - contextoverflow: Event; -} - -interface AILanguageModelPromptOptions { - signal?: AbortSignal; -} - -interface AILanguageModelCloneOptions { - signal?: AbortSignal; -} - -interface AILanguageModelCapabilities { - readonly available: AICapabilityAvailability; - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; - - readonly defaultTopK: number | null; - readonly maxTopK: number | null; - readonly defaultTemperature: number | null; - readonly maxTemperature: number | null; -} - -// Summarizer -// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl - -interface AISummarizerFactory { - create(options?: AISummarizerCreateOptions): Promise; - capabilities(): Promise; -} - -interface AISummarizerCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - sharedContext?: string; - type?: AISummarizerType; - format?: AISummarizerFormat; - length?: AISummarizerLength; -} - -type AISummarizerType = "tl;dr" | "key-points" | "teaser" | "headline"; -type AISummarizerFormat = "plain-text" | "markdown"; -type AISummarizerLength = "short" | "medium" | "long"; - -interface AISummarizer { - summarize(input: string, options?: AISummarizerSummarizeOptions): Promise; - summarizeStreaming(input: string, options?: AISummarizerSummarizeOptions): ReadableStream; - - readonly sharedContext: string; - readonly type: AISummarizerType; - readonly format: AISummarizerFormat; - readonly length: AISummarizerLength; - - destroy(): void; -} - -interface AISummarizerSummarizeOptions { - signal?: AbortSignal; - context?: string; -} - -interface AISummarizerCapabilities { - readonly available: AICapabilityAvailability; - - supportsType(type: AISummarizerType): AICapabilityAvailability; - supportsFormat(format: AISummarizerFormat): AICapabilityAvailability; - supportsLength(length: AISummarizerLength): AICapabilityAvailability; - - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; -} - -// Writer -// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl - -interface AIWriterFactory { - create(options?: AIWriterCreateOptions): Promise; - capabilities(): Promise; -} - -interface AIWriterCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - sharedContext?: string; - tone?: AIWriterTone; - format?: AIWriterFormat; - length?: AIWriterLength; -} - -type AIWriterTone = "formal" | "neutral" | "casual"; -type AIWriterFormat = "plain-text" | "markdown"; -type AIWriterLength = "short" | "medium" | "long"; - -interface AIWriter { - write(writingTask: string, options?: AIWriterWriteOptions): Promise; - writeStreaming(writingTask: string, options?: AIWriterWriteOptions): ReadableStream; - - readonly sharedContext: string; - readonly tone: AIWriterTone; - readonly format: AIWriterFormat; - readonly length: AIWriterLength; - - destroy(): void; -} - -interface AIWriterWriteOptions { - signal?: AbortSignal; - context?: string; -} - -interface AIWriterCapabilities { - readonly available: AICapabilityAvailability; - - supportsTone(tone: AIWriterTone): AICapabilityAvailability; - supportsFormat(format: AIWriterFormat): AICapabilityAvailability; 
- supportsLength(length: AIWriterLength): AICapabilityAvailability; - - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; -} - -// Rewriter -// https://github.com/explainers-by-googlers/writing-assistance-apis/#full-api-surface-in-web-idl - -interface AIRewriterFactory { - create(options?: AIRewriterCreateOptions): Promise; - capabilities(): Promise; -} - -interface AIRewriterCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - sharedContext?: string; - tone?: AIRewriterTone; - format?: AIRewriterFormat; - length?: AIRewriterLength; -} - -type AIRewriterTone = "as-is" | "more-formal" | "more-casual"; -type AIRewriterFormat = "as-is" | "plain-text" | "markdown"; -type AIRewriterLength = "as-is" | "shorter" | "longer"; - -interface AIRewriter { - rewrite(input: string, options?: AIRewriterRewriteOptions): Promise; - rewriteStreaming(input: string, options?: AIRewriterRewriteOptions): ReadableStream; - - readonly sharedContext: string; - readonly tone: AIRewriterTone; - readonly format: AIRewriterFormat; - readonly length: AIRewriterLength; - - destroy(): void; -} - -interface AIRewriterRewriteOptions { - signal?: AbortSignal; - context?: string; -} - -interface AIRewriterCapabilities { - readonly available: AICapabilityAvailability; - - supportsTone(tone: AIRewriterTone): AICapabilityAvailability; - supportsFormat(format: AIRewriterFormat): AICapabilityAvailability; - supportsLength(length: AIRewriterLength): AICapabilityAvailability; - - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; -} - -// Translator -// https://github.com/WICG/translation-api?tab=readme-ov-file#full-api-surface-in-web-idl - -interface AITranslatorFactory { - create(options: AITranslatorCreateOptions): Promise; - capabilities(): Promise; -} - -interface AITranslator { - translate(input: string, options?: AITranslatorTranslateOptions): Promise; - translateStreaming(input: string, options?: AITranslatorTranslateOptions): ReadableStream; - - readonly sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier; - readonly targetLanguage: Intl.UnicodeBCP47LocaleIdentifier; - - destroy(): void; -} - -interface AITranslatorCapabilities { - readonly available: AICapabilityAvailability; - - languagePairAvailable( - sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier, - targetLanguage: Intl.UnicodeBCP47LocaleIdentifier, - ): AICapabilityAvailability; -} - -interface AITranslatorCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - sourceLanguage: Intl.UnicodeBCP47LocaleIdentifier; - targetLanguage: Intl.UnicodeBCP47LocaleIdentifier; -} - -interface AITranslatorTranslateOptions { - signal?: AbortSignal; -} - -// Language detector -// https://github.com/WICG/translation-api?tab=readme-ov-file#full-api-surface-in-web-idl - -interface AILanguageDetectorFactory { - create(options?: AILanguageDetectorCreateOptions): Promise; - capabilities(): Promise; -} - -interface AILanguageDetector { - detect(input: string, options?: AILanguageDetectorDetectOptions): Promise; - - destroy(): void; -} - -interface AILanguageDetectorCapabilities { - readonly available: AICapabilityAvailability; - - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; -} - -interface AILanguageDetectorCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; -} - -interface AILanguageDetectorDetectOptions { - signal?: AbortSignal; -} - -interface LanguageDetectionResult { - /** null 
represents unknown language */ - detectedLanguage: Intl.UnicodeBCP47LocaleIdentifier | null; - confidence: number; -} \ No newline at end of file From 5e9745771005e686b30663c81b5fadc1cc3ed67b Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 25 Mar 2025 09:09:10 -0700 Subject: [PATCH 04/22] Assert HybridParams sets the model name --- packages/vertexai/src/api.test.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/vertexai/src/api.test.ts b/packages/vertexai/src/api.test.ts index 7b25dbdf9e9..d49e5dcc15d 100644 --- a/packages/vertexai/src/api.test.ts +++ b/packages/vertexai/src/api.test.ts @@ -14,7 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { ImagenModelParams, ModelParams, VertexAIErrorCode } from './types'; +import { + ImagenModelParams, + ModelParams, + VertexAIErrorCode +} from './types'; import { VertexAIError } from './errors'; import { ImagenModel, getGenerativeModel, getImagenModel } from './api'; import { expect } from 'chai'; From 1fe8a08d9743cbfdfafa24a35b0402843c94ae06 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Wed, 26 Mar 2025 16:56:42 -0700 Subject: [PATCH 05/22] Use dom-chromium-ai package directly --- packages/vertexai/package.json | 1 + packages/vertexai/src/types/ai.ts | 65 ------------------------------- yarn.lock | 5 +++ 3 files changed, 6 insertions(+), 65 deletions(-) delete mode 100644 packages/vertexai/src/types/ai.ts diff --git a/packages/vertexai/package.json b/packages/vertexai/package.json index 9faf562a535..076b6a1bc4a 100644 --- a/packages/vertexai/package.json +++ b/packages/vertexai/package.json @@ -58,6 +58,7 @@ "devDependencies": { "@firebase/app": "0.11.4", "@rollup/plugin-json": "6.1.0", + "@types/dom-chromium-ai": "0.0.6", "rollup": "2.79.2", "rollup-plugin-replace": "2.2.0", "rollup-plugin-typescript2": "0.36.0", diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts deleted file mode 100644 index 10cce1cab9b..00000000000 --- a/packages/vertexai/src/types/ai.ts +++ /dev/null @@ -1,65 +0,0 @@ -/** - * Shims @types/dom-chromium-ai - * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. 
- */ -interface AICreateMonitor extends EventTarget { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - ondownloadprogress: ((this: AICreateMonitor, ev: DownloadProgressEvent) => any) | null; - - addEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, - options?: boolean | AddEventListenerOptions, - ): void; - addEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | AddEventListenerOptions, - ): void; - removeEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, - options?: boolean | EventListenerOptions, - ): void; - removeEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | EventListenerOptions, - ): void; -} - -interface DownloadProgressEvent extends Event { - readonly loaded: number; - readonly total: number; -} - -interface AICreateMonitorEventMap { - downloadprogress: DownloadProgressEvent; -} - -type AICreateMonitorCallback = (monitor: AICreateMonitor) => void; - -// Language Model -// https://github.com/explainers-by-googlers/prompt-api/#full-api-surface-in-web-idl - -interface AILanguageModelCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - topK?: number; - temperature?: number; -} - -export interface AILanguageModelCreateOptionsWithSystemPrompt extends AILanguageModelCreateOptions { - systemPrompt?: string; - initialPrompts?: AILanguageModelPrompt[]; -} - -type AILanguageModelPromptRole = "user" | "assistant"; - -interface AILanguageModelPrompt { - role: AILanguageModelPromptRole; - content: string; -} diff --git a/yarn.lock b/yarn.lock index d5ea91a7093..fbfb49f23e0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2947,6 +2947,11 @@ resolved "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz#334311971d3a07121e7eb91b684a605e7eea9cbd" integrity sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw== +"@types/dom-chromium-ai@0.0.6": + version "0.0.6" + resolved "https://registry.npmjs.org/@types/dom-chromium-ai/-/dom-chromium-ai-0.0.6.tgz#0c9e5712d8db3d26586cd9f175001b509cd2e514" + integrity sha512-/jUGe9a3BLzsjjg18Olk/Ul64PZ0P4aw8uNxrXeXVTni5PSxyCfyhHb4UohsXNVByOnwYGzlqUcb3vYKVsG4mg== + "@types/eslint-scope@^3.7.7": version "3.7.7" resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" From 869fee7518bd683563e71ed2e2cbbff4b3a5f013 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 25 Mar 2025 09:30:30 -0700 Subject: [PATCH 06/22] Define ChromeAdapter class --- packages/vertexai/src/types/ai.ts | 161 ++++++++++++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 packages/vertexai/src/types/ai.ts diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts new file mode 100644 index 00000000000..98a1e1d35bf --- /dev/null +++ b/packages/vertexai/src/types/ai.ts @@ -0,0 +1,161 @@ +/** + * Shims @types/dom-chromium-ai + * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. 
+ */ +export interface AI { + readonly languageModel: AILanguageModelFactory; +} + +interface AICreateMonitor extends EventTarget { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ondownloadprogress: ((this: AICreateMonitor, ev: DownloadProgressEvent) => any) | null; + + addEventListener( + type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, + options?: boolean | AddEventListenerOptions, + ): void; + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions, + ): void; + removeEventListener( + type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, + options?: boolean | EventListenerOptions, + ): void; + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions, + ): void; +} + +interface DownloadProgressEvent extends Event { + readonly loaded: number; + readonly total: number; +} + +interface AICreateMonitorEventMap { + downloadprogress: DownloadProgressEvent; +} + +type AICreateMonitorCallback = (monitor: AICreateMonitor) => void; + +type AICapabilityAvailability = "readily" | "after-download" | "no"; + +// Language Model +// https://github.com/explainers-by-googlers/prompt-api/#full-api-surface-in-web-idl + +interface AILanguageModelFactory { + create( + options?: AILanguageModelCreateOptionsWithSystemPrompt | AILanguageModelCreateOptionsWithoutSystemPrompt, + ): Promise; + capabilities(): Promise; +} + +interface AILanguageModelCreateOptions { + signal?: AbortSignal; + monitor?: AICreateMonitorCallback; + + topK?: number; + temperature?: number; +} + +export interface AILanguageModelCreateOptionsWithSystemPrompt extends AILanguageModelCreateOptions { + systemPrompt?: string; + initialPrompts?: AILanguageModelPrompt[]; +} + +interface AILanguageModelCreateOptionsWithoutSystemPrompt extends AILanguageModelCreateOptions { + systemPrompt?: never; + initialPrompts?: + | [AILanguageModelSystemPrompt, ...AILanguageModelPrompt[]] + | AILanguageModelPrompt[]; +} + +type AILanguageModelPromptRole = "user" | "assistant"; +type AILanguageModelInitialPromptRole = "system" | AILanguageModelPromptRole; + +interface AILanguageModelPrompt { + role: AILanguageModelPromptRole; + content: string; +} + +interface AILanguageModelInitialPrompt { + role: AILanguageModelInitialPromptRole; + content: string; +} + +interface AILanguageModelSystemPrompt extends AILanguageModelInitialPrompt { + role: "system"; +} + +type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[]; + +interface AILanguageModel extends EventTarget { + prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; + promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream; + + countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; + readonly maxTokens: number; + readonly tokensSoFar: number; + readonly tokensLeft: number; + + readonly topK: number; + readonly temperature: number; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + oncontextoverflow: ((this: AILanguageModel, ev: Event) => any) | null; + + addEventListener( + type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + listener: (this: 
AILanguageModel, ev: AILanguageModelEventMap[K]) => any, + options?: boolean | AddEventListenerOptions, + ): void; + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions, + ): void; + removeEventListener( + type: K, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, + options?: boolean | EventListenerOptions, + ): void; + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions, + ): void; + + clone(options?: AILanguageModelCloneOptions): Promise; + destroy(): void; +} + +interface AILanguageModelEventMap { + contextoverflow: Event; +} + +interface AILanguageModelPromptOptions { + signal?: AbortSignal; +} + +interface AILanguageModelCloneOptions { + signal?: AbortSignal; +} + +interface AILanguageModelCapabilities { + readonly available: AICapabilityAvailability; + languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; + + readonly defaultTopK: number | null; + readonly maxTopK: number | null; + readonly defaultTemperature: number | null; + readonly maxTemperature: number | null; +} From ff31b42ef23770034c6d93b6fcd5ce0fcccfb4ed Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 1 Apr 2025 17:14:19 -0700 Subject: [PATCH 07/22] Implement ChromeAdapter class --- packages/util/src/environment.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/util/src/environment.ts b/packages/util/src/environment.ts index a0467b08c59..50d5f534106 100644 --- a/packages/util/src/environment.ts +++ b/packages/util/src/environment.ts @@ -173,6 +173,12 @@ export function isSafari(): boolean { ); } +export function isChrome(): boolean { + return ( + !isNode() && !!navigator.userAgent && navigator.userAgent.includes('Chrome') + ); +} + /** * This method checks if indexedDB is supported by current browser/service worker context * @return true if indexedDB is supported by current browser/service worker context From 1e487d5cbcd3a4f1229fa301de9d62069f1962be Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 1 Apr 2025 17:15:47 -0700 Subject: [PATCH 08/22] Integrate with e2e test app --- e2e/sample-apps/modular.js | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/e2e/sample-apps/modular.js b/e2e/sample-apps/modular.js index 4c5238d44dc..77cded98eb1 100644 --- a/e2e/sample-apps/modular.js +++ b/e2e/sample-apps/modular.js @@ -58,12 +58,7 @@ import { onValue, off } from 'firebase/database'; -import { - getGenerativeModel, - getVertexAI, - InferenceMode, - VertexAI -} from 'firebase/vertexai'; +import { getGenerativeModel, getVertexAI } from 'firebase/vertexai'; import { getDataConnect, DataConnect } from 'firebase/data-connect'; /** @@ -337,6 +332,15 @@ function callDataConnect(app) { console.log('[DATACONNECT] initialized'); } +async function callVertex(app) { + console.log('[VERTEX] start'); + const vertex = getVertexAI(app); + const model = getGenerativeModel(vertex, { mode: 'prefer_on_device' }); + const result = await model.generateContent("What is Roko's Basalisk?"); + console.log(result.response.text()); + console.log('[VERTEX] initialized'); +} + /** * Run smoke tests for all products. * Comment out any products you want to ignore. 
@@ -358,6 +362,7 @@ async function main() { await callVertexAI(app); callDataConnect(app); await authLogout(app); + await callVertex(app); console.log('DONE'); } From 8307fe5fd9a76ad15a1a0148c51e561da14b1408 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 28 Mar 2025 16:41:54 -0700 Subject: [PATCH 09/22] Parameterize default in-cloud model name --- packages/vertexai/src/api.test.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/vertexai/src/api.test.ts b/packages/vertexai/src/api.test.ts index d49e5dcc15d..7b25dbdf9e9 100644 --- a/packages/vertexai/src/api.test.ts +++ b/packages/vertexai/src/api.test.ts @@ -14,11 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { - ImagenModelParams, - ModelParams, - VertexAIErrorCode -} from './types'; +import { ImagenModelParams, ModelParams, VertexAIErrorCode } from './types'; import { VertexAIError } from './errors'; import { ImagenModel, getGenerativeModel, getImagenModel } from './api'; import { expect } from 'chai'; From 291c53b3e8d6228721e25a5eddfd59bfe4076b25 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Tue, 1 Apr 2025 17:44:30 -0700 Subject: [PATCH 10/22] Use type for inference mode and update docs --- docs-devsite/vertexai.hybridparams.md | 10 ++++++++++ packages/vertexai/src/api.test.ts | 4 ++++ 2 files changed, 14 insertions(+) diff --git a/docs-devsite/vertexai.hybridparams.md b/docs-devsite/vertexai.hybridparams.md index cf847b40fa7..c9b053b09f4 100644 --- a/docs-devsite/vertexai.hybridparams.md +++ b/docs-devsite/vertexai.hybridparams.md @@ -36,6 +36,16 @@ Optional. Specifies advanced params for in-cloud inference. inCloudParams?: ModelParams; ``` +## HybridParams.inCloudParams + +Optional. Specifies advanced params for in-cloud inference. + +Signature: + +```typescript +inCloudParams?: ModelParams; +``` + ## HybridParams.mode Specifies on-device or in-cloud inference. Defaults to prefer on-device. 
diff --git a/packages/vertexai/src/api.test.ts b/packages/vertexai/src/api.test.ts index 7b25dbdf9e9..c00e96840d2 100644 --- a/packages/vertexai/src/api.test.ts +++ b/packages/vertexai/src/api.test.ts @@ -111,7 +111,11 @@ describe('Top level API', () => { }); it('getGenerativeModel with HybridParams honors a model override', () => { const genModel = getGenerativeModel(fakeVertexAI, { +<<<<<<< HEAD mode: 'prefer_on_device', +======= + mode: 'only_in_cloud', +>>>>>>> 814a1dc95 (Use type for inference mode and update docs) inCloudParams: { model: 'my-model' } }); expect(genModel.model).to.equal('publishers/google/models/my-model'); From fe2bebc8cdff91cd5717204ca2d7a868f723f27d Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Thu, 3 Apr 2025 13:43:54 -0700 Subject: [PATCH 11/22] Remove stray ai.ts --- packages/vertexai/src/api.test.ts | 4 - packages/vertexai/src/types/ai.ts | 161 ------------------------------ 2 files changed, 165 deletions(-) delete mode 100644 packages/vertexai/src/types/ai.ts diff --git a/packages/vertexai/src/api.test.ts b/packages/vertexai/src/api.test.ts index c00e96840d2..7b25dbdf9e9 100644 --- a/packages/vertexai/src/api.test.ts +++ b/packages/vertexai/src/api.test.ts @@ -111,11 +111,7 @@ describe('Top level API', () => { }); it('getGenerativeModel with HybridParams honors a model override', () => { const genModel = getGenerativeModel(fakeVertexAI, { -<<<<<<< HEAD mode: 'prefer_on_device', -======= - mode: 'only_in_cloud', ->>>>>>> 814a1dc95 (Use type for inference mode and update docs) inCloudParams: { model: 'my-model' } }); expect(genModel.model).to.equal('publishers/google/models/my-model'); diff --git a/packages/vertexai/src/types/ai.ts b/packages/vertexai/src/types/ai.ts deleted file mode 100644 index 98a1e1d35bf..00000000000 --- a/packages/vertexai/src/types/ai.ts +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Shims @types/dom-chromium-ai - * TODO: replace with @types/dom-chromium-ai once we can use es2020.intl. 
- */ -export interface AI { - readonly languageModel: AILanguageModelFactory; -} - -interface AICreateMonitor extends EventTarget { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - ondownloadprogress: ((this: AICreateMonitor, ev: DownloadProgressEvent) => any) | null; - - addEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, - options?: boolean | AddEventListenerOptions, - ): void; - addEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | AddEventListenerOptions, - ): void; - removeEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: AICreateMonitor, ev: AICreateMonitorEventMap[K]) => any, - options?: boolean | EventListenerOptions, - ): void; - removeEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | EventListenerOptions, - ): void; -} - -interface DownloadProgressEvent extends Event { - readonly loaded: number; - readonly total: number; -} - -interface AICreateMonitorEventMap { - downloadprogress: DownloadProgressEvent; -} - -type AICreateMonitorCallback = (monitor: AICreateMonitor) => void; - -type AICapabilityAvailability = "readily" | "after-download" | "no"; - -// Language Model -// https://github.com/explainers-by-googlers/prompt-api/#full-api-surface-in-web-idl - -interface AILanguageModelFactory { - create( - options?: AILanguageModelCreateOptionsWithSystemPrompt | AILanguageModelCreateOptionsWithoutSystemPrompt, - ): Promise; - capabilities(): Promise; -} - -interface AILanguageModelCreateOptions { - signal?: AbortSignal; - monitor?: AICreateMonitorCallback; - - topK?: number; - temperature?: number; -} - -export interface AILanguageModelCreateOptionsWithSystemPrompt extends AILanguageModelCreateOptions { - systemPrompt?: string; - initialPrompts?: AILanguageModelPrompt[]; -} - -interface AILanguageModelCreateOptionsWithoutSystemPrompt extends AILanguageModelCreateOptions { - systemPrompt?: never; - initialPrompts?: - | [AILanguageModelSystemPrompt, ...AILanguageModelPrompt[]] - | AILanguageModelPrompt[]; -} - -type AILanguageModelPromptRole = "user" | "assistant"; -type AILanguageModelInitialPromptRole = "system" | AILanguageModelPromptRole; - -interface AILanguageModelPrompt { - role: AILanguageModelPromptRole; - content: string; -} - -interface AILanguageModelInitialPrompt { - role: AILanguageModelInitialPromptRole; - content: string; -} - -interface AILanguageModelSystemPrompt extends AILanguageModelInitialPrompt { - role: "system"; -} - -type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[]; - -interface AILanguageModel extends EventTarget { - prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; - promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream; - - countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise; - readonly maxTokens: number; - readonly tokensSoFar: number; - readonly tokensLeft: number; - - readonly topK: number; - readonly temperature: number; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - oncontextoverflow: ((this: AILanguageModel, ev: Event) => any) | null; - - addEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: 
AILanguageModel, ev: AILanguageModelEventMap[K]) => any, - options?: boolean | AddEventListenerOptions, - ): void; - addEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | AddEventListenerOptions, - ): void; - removeEventListener( - type: K, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (this: AILanguageModel, ev: AILanguageModelEventMap[K]) => any, - options?: boolean | EventListenerOptions, - ): void; - removeEventListener( - type: string, - listener: EventListenerOrEventListenerObject, - options?: boolean | EventListenerOptions, - ): void; - - clone(options?: AILanguageModelCloneOptions): Promise; - destroy(): void; -} - -interface AILanguageModelEventMap { - contextoverflow: Event; -} - -interface AILanguageModelPromptOptions { - signal?: AbortSignal; -} - -interface AILanguageModelCloneOptions { - signal?: AbortSignal; -} - -interface AILanguageModelCapabilities { - readonly available: AICapabilityAvailability; - languageAvailable(languageTag: Intl.UnicodeBCP47LocaleIdentifier): AICapabilityAvailability; - - readonly defaultTopK: number | null; - readonly maxTopK: number | null; - readonly defaultTemperature: number | null; - readonly maxTemperature: number | null; -} From d4286d683395c7c3b8c9dee8bd0870e53df0b997 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Thu, 3 Apr 2025 14:01:12 -0700 Subject: [PATCH 12/22] Run yarn format --- e2e/sample-apps/modular.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/e2e/sample-apps/modular.js b/e2e/sample-apps/modular.js index 77cded98eb1..994f8ddc338 100644 --- a/e2e/sample-apps/modular.js +++ b/e2e/sample-apps/modular.js @@ -58,7 +58,16 @@ import { onValue, off } from 'firebase/database'; +<<<<<<< HEAD import { getGenerativeModel, getVertexAI } from 'firebase/vertexai'; +======= +import { + getGenerativeModel, + getVertexAI, + InferenceMode, + VertexAI +} from 'firebase/vertexai'; +>>>>>>> 3f02db006 (Run yarn format) import { getDataConnect, DataConnect } from 'firebase/data-connect'; /** From b898cd0fb9c1b1963b7ffd1170050b3c542e1fac Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Thu, 3 Apr 2025 18:16:32 -0700 Subject: [PATCH 13/22] Test request-based availability checks --- .../vertexai/src/methods/chrome-adapter.ts | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index 10844079c03..ab73c82bfb5 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -141,6 +141,25 @@ export class ChromeAdapter { } } + if (request.systemInstruction) { + const systemContent = request.systemInstruction as Content; + // Returns false if the role can't be represented on-device. + if (systemContent.role && systemContent.role === 'function') { + return false; + } + + // Returns false if the system prompt is multi-part. + if (systemContent.parts && systemContent.parts.length > 1) { + return false; + } + + // Returns false if the system prompt isn't text. + const systemText = request.systemInstruction as TextPart; + if (!systemText.text) { + return false; + } + } + return true; } From 2fb27953e0c2cc74a434cc4708c1d721bbb9295e Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 4 Apr 2025 16:19:44 -0700 Subject: [PATCH 14/22] Remove request.systemInstruction validation We only define system prompts via onDeviceParams initialization. 
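For illustration only (not part of this change): a minimal sketch of that
initialization from the caller's side, assuming HybridParams exposes an
onDeviceParams field shaped like the AILanguageModelCreateOptionsWithSystemPrompt
type defined earlier in this series.

    // Sketch only; the onDeviceParams field name on HybridParams is an assumption.
    const model = getGenerativeModel(vertexAI, {
      mode: 'prefer_on_device',
      // The system prompt is fixed when the on-device session is created,
      // so per-request validation of request.systemInstruction is unnecessary.
      onDeviceParams: {
        systemPrompt: 'You are a friendly assistant.',
        temperature: 0.8,
        topK: 3
      },
      inCloudParams: { model: 'my-model' }
    });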
--- .../vertexai/src/methods/chrome-adapter.ts | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index ab73c82bfb5..10844079c03 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -141,25 +141,6 @@ export class ChromeAdapter { } } - if (request.systemInstruction) { - const systemContent = request.systemInstruction as Content; - // Returns false if the role can't be represented on-device. - if (systemContent.role && systemContent.role === 'function') { - return false; - } - - // Returns false if the system prompt is multi-part. - if (systemContent.parts && systemContent.parts.length > 1) { - return false; - } - - // Returns false if the system prompt isn't text. - const systemText = request.systemInstruction as TextPart; - if (!systemText.text) { - return false; - } - } - return true; } From ef893c93c4cae1371c6d9ddb9b29f896e65af2a0 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Mon, 7 Apr 2025 16:45:14 -0700 Subject: [PATCH 15/22] Integrate chrome adapter into stream methods --- e2e/sample-apps/modular.js | 28 ++++--------- packages/vertexai/src/methods/chat-session.ts | 1 + .../vertexai/src/methods/chrome-adapter.ts | 40 +++++++++++++++++++ .../vertexai/src/methods/generate-content.ts | 26 ++++++++++-- .../vertexai/src/models/generative-model.ts | 1 + 5 files changed, 72 insertions(+), 24 deletions(-) diff --git a/e2e/sample-apps/modular.js b/e2e/sample-apps/modular.js index 994f8ddc338..e3170bb3c57 100644 --- a/e2e/sample-apps/modular.js +++ b/e2e/sample-apps/modular.js @@ -58,16 +58,7 @@ import { onValue, off } from 'firebase/database'; -<<<<<<< HEAD import { getGenerativeModel, getVertexAI } from 'firebase/vertexai'; -======= -import { - getGenerativeModel, - getVertexAI, - InferenceMode, - VertexAI -} from 'firebase/vertexai'; ->>>>>>> 3f02db006 (Run yarn format) import { getDataConnect, DataConnect } from 'firebase/data-connect'; /** @@ -322,8 +313,13 @@ function callPerformance(app) { async function callVertexAI(app) { console.log('[VERTEXAI] start'); const vertexAI = getVertexAI(app); - const model = getGenerativeModel(vertexAI, { model: 'gemini-1.5-flash' }); - const result = await model.countTokens('abcdefg'); + const model = getGenerativeModel(vertexAI, { + mode: 'prefer_in_cloud' + }); + const result = await model.generateContentStream("What is Roko's Basalisk?"); + for await (const chunk of result.stream) { + console.log(chunk.text()); + } console.log(`[VERTEXAI] counted tokens: ${result.totalTokens}`); } @@ -341,15 +337,6 @@ function callDataConnect(app) { console.log('[DATACONNECT] initialized'); } -async function callVertex(app) { - console.log('[VERTEX] start'); - const vertex = getVertexAI(app); - const model = getGenerativeModel(vertex, { mode: 'prefer_on_device' }); - const result = await model.generateContent("What is Roko's Basalisk?"); - console.log(result.response.text()); - console.log('[VERTEX] initialized'); -} - /** * Run smoke tests for all products. * Comment out any products you want to ignore. 
@@ -371,7 +358,6 @@ async function main() { await callVertexAI(app); callDataConnect(app); await authLogout(app); - await callVertex(app); console.log('DONE'); } diff --git a/packages/vertexai/src/methods/chat-session.ts b/packages/vertexai/src/methods/chat-session.ts index 4188872cff7..112ddf5857e 100644 --- a/packages/vertexai/src/methods/chat-session.ts +++ b/packages/vertexai/src/methods/chat-session.ts @@ -149,6 +149,7 @@ export class ChatSession { this._apiSettings, this.model, generateContentRequest, + this.chromeAdapter, this.requestOptions ); diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index 10844079c03..1781984a86e 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -117,6 +117,46 @@ export class ChromeAdapter { } as Response; } + /** + * Generates a stream of content. + */ + async generateContentStreamOnDevice( + request: GenerateContentRequest + ): Promise { + const session = await this.createSession( + // TODO: normalize on-device params during construction. + this.onDeviceParams || {} + ); + const messages = ChromeAdapter.toLanguageModelMessages(request.contents); + const stream = await session.promptStreaming(messages); + return ChromeAdapter.toStreamResponse(stream); + } + // Formats string stream returned by Chrome as SSE returned by Vertex. + private static async toStreamResponse( + stream: ReadableStream + ): Promise { + const encoder = new TextEncoder(); + return { + body: stream.pipeThrough( + new TransformStream({ + transform(chunk, controller) { + const json = JSON.stringify({ + candidates: [ + { + content: { + role: 'model', + parts: [{ text: chunk }] + } + } + ] + }); + controller.enqueue(encoder.encode(`data: ${json}\n\n`)); + } + }) + ) + } as Response; + } + /** * Asserts inference for the given request can be performed by an on-device model. 
*/ diff --git a/packages/vertexai/src/methods/generate-content.ts b/packages/vertexai/src/methods/generate-content.ts index ba7a162aa9c..a6343bcc3a8 100644 --- a/packages/vertexai/src/methods/generate-content.ts +++ b/packages/vertexai/src/methods/generate-content.ts @@ -28,13 +28,13 @@ import { processStream } from '../requests/stream-reader'; import { ApiSettings } from '../types/internal'; import { ChromeAdapter } from './chrome-adapter'; -export async function generateContentStream( +async function generateContentStreamOnCloud( apiSettings: ApiSettings, model: string, params: GenerateContentRequest, requestOptions?: RequestOptions -): Promise { - const response = await makeRequest( +): Promise { + return makeRequest( model, Task.STREAM_GENERATE_CONTENT, apiSettings, @@ -42,6 +42,26 @@ export async function generateContentStream( JSON.stringify(params), requestOptions ); +} + +export async function generateContentStream( + apiSettings: ApiSettings, + model: string, + params: GenerateContentRequest, + chromeAdapter: ChromeAdapter, + requestOptions?: RequestOptions +): Promise { + let response; + if (await chromeAdapter.isAvailable(params)) { + response = await chromeAdapter.generateContentStreamOnDevice(params); + } else { + response = await generateContentStreamOnCloud( + apiSettings, + model, + params, + requestOptions + ); + } return processStream(response); } diff --git a/packages/vertexai/src/models/generative-model.ts b/packages/vertexai/src/models/generative-model.ts index c58eb3a1497..7b4282c471c 100644 --- a/packages/vertexai/src/models/generative-model.ts +++ b/packages/vertexai/src/models/generative-model.ts @@ -123,6 +123,7 @@ export class GenerativeModel extends VertexAIModel { systemInstruction: this.systemInstruction, ...formattedParams }, + this.chromeAdapter, this.requestOptions ); } From 4c378593bd096e2e8a5181b8b97b5edff0dc57b0 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Mon, 7 Apr 2025 17:09:05 -0700 Subject: [PATCH 16/22] Refactor to emulate Vertex response --- .../vertexai/src/methods/chrome-adapter.ts | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index 1781984a86e..0988bc5f53b 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -103,17 +103,23 @@ export class ChromeAdapter { ); const messages = ChromeAdapter.toLanguageModelMessages(request.contents); const text = await session.prompt(messages); + return ChromeAdapter.toResponse(text); + } + + /** + * Formats string returned by Chrome as a {@link Response} returned by Vertex. + */ + private static toResponse(text: string): Response { return { - json: () => - Promise.resolve({ - candidates: [ - { - content: { - parts: [{ text }] - } + json: async () => ({ + candidates: [ + { + content: { + parts: [{ text }] } - ] - }) + } + ] + }) } as Response; } @@ -131,7 +137,10 @@ export class ChromeAdapter { const stream = await session.promptStreaming(messages); return ChromeAdapter.toStreamResponse(stream); } - // Formats string stream returned by Chrome as SSE returned by Vertex. + + /** + * Formats string stream returned by Chrome as SSE returned by Vertex. 
+ */ private static async toStreamResponse( stream: ReadableStream ): Promise { From eb25fec7619bc2bcac53c9c67c5905a574d1910a Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Mon, 7 Apr 2025 17:11:54 -0700 Subject: [PATCH 17/22] Group response formatting methods together --- .../vertexai/src/methods/chrome-adapter.ts | 96 +++++++++---------- 1 file changed, 43 insertions(+), 53 deletions(-) diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index 0988bc5f53b..6c63c5a54f8 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -105,27 +105,6 @@ export class ChromeAdapter { const text = await session.prompt(messages); return ChromeAdapter.toResponse(text); } - - /** - * Formats string returned by Chrome as a {@link Response} returned by Vertex. - */ - private static toResponse(text: string): Response { - return { - json: async () => ({ - candidates: [ - { - content: { - parts: [{ text }] - } - } - ] - }) - } as Response; - } - - /** - * Generates a stream of content. - */ async generateContentStreamOnDevice( request: GenerateContentRequest ): Promise { @@ -137,38 +116,6 @@ export class ChromeAdapter { const stream = await session.promptStreaming(messages); return ChromeAdapter.toStreamResponse(stream); } - - /** - * Formats string stream returned by Chrome as SSE returned by Vertex. - */ - private static async toStreamResponse( - stream: ReadableStream - ): Promise { - const encoder = new TextEncoder(); - return { - body: stream.pipeThrough( - new TransformStream({ - transform(chunk, controller) { - const json = JSON.stringify({ - candidates: [ - { - content: { - role: 'model', - parts: [{ text: chunk }] - } - } - ] - }); - controller.enqueue(encoder.encode(`data: ${json}\n\n`)); - } - }) - ) - } as Response; - } - - /** - * Asserts inference for the given request can be performed by an on-device model. - */ private static isOnDeviceRequest(request: GenerateContentRequest): boolean { // Returns false if the prompt is empty. if (request.contents.length === 0) { @@ -273,4 +220,47 @@ export class ChromeAdapter { this.oldSession = newSession; return newSession; } + + /** + * Formats string returned by Chrome as a {@link Response} returned by Vertex. + */ + private static toResponse(text: string): Response { + return { + json: async () => ({ + candidates: [ + { + content: { + parts: [{ text }] + } + } + ] + }) + } as Response; + } + + /** + * Formats string stream returned by Chrome as SSE returned by Vertex. 
+ */ + private static toStreamResponse(stream: ReadableStream): Response { + const encoder = new TextEncoder(); + return { + body: stream.pipeThrough( + new TransformStream({ + transform(chunk, controller) { + const json = JSON.stringify({ + candidates: [ + { + content: { + role: 'model', + parts: [{ text: chunk }] + } + } + ] + }); + controller.enqueue(encoder.encode(`data: ${json}\n\n`)); + } + }) + ) + } as Response; + } } From b8d849c07e55a67a5a901528e8b853bfe8f2fc01 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 18 Apr 2025 14:43:24 -0700 Subject: [PATCH 18/22] Run docgen --- common/api-review/util.api.md | 5 +++++ docs-devsite/vertexai.hybridparams.md | 10 ---------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/common/api-review/util.api.md b/common/api-review/util.api.md index 8c62ff229ac..fb8afb2319c 100644 --- a/common/api-review/util.api.md +++ b/common/api-review/util.api.md @@ -264,6 +264,11 @@ export function isBrowser(): boolean; // @public (undocumented) export function isBrowserExtension(): boolean; +// Warning: (ae-missing-release-tag) "isChrome" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) +// +// @public (undocumented) +export function isChrome(): boolean; + // Warning: (ae-missing-release-tag) "isCloudflareWorker" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public diff --git a/docs-devsite/vertexai.hybridparams.md b/docs-devsite/vertexai.hybridparams.md index c9b053b09f4..cf847b40fa7 100644 --- a/docs-devsite/vertexai.hybridparams.md +++ b/docs-devsite/vertexai.hybridparams.md @@ -36,16 +36,6 @@ Optional. Specifies advanced params for in-cloud inference. inCloudParams?: ModelParams; ``` -## HybridParams.inCloudParams - -Optional. Specifies advanced params for in-cloud inference. - -Signature: - -```typescript -inCloudParams?: ModelParams; -``` - ## HybridParams.mode Specifies on-device or in-cloud inference. Defaults to prefer on-device. 
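For reference, the toStreamResponse helper grouped together in patch 17 frames each string chunk from Chrome's promptStreaming() as a server-sent event, so the existing Vertex stream reader can consume on-device output unchanged. The standalone sketch below mirrors only that wire format; toSseBody and readBack are illustrative names and are not part of the patch series.

```ts
// Standalone sketch of the SSE framing used by ChromeAdapter.toStreamResponse
// (patches 16 and 17). Only the wire format mirrors the patch; toSseBody and
// readBack are illustrative names, not part of the SDK.
const encoder = new TextEncoder();
const decoder = new TextDecoder();

function toSseBody(stream: ReadableStream<string>): ReadableStream<Uint8Array> {
  return stream.pipeThrough(
    new TransformStream<string, Uint8Array>({
      transform(chunk, controller) {
        const json = JSON.stringify({
          candidates: [{ content: { role: 'model', parts: [{ text: chunk }] } }]
        });
        // Each chunk becomes one `data: <JSON>\n\n` event, the same shape the
        // Vertex stream reader already expects from the cloud endpoint.
        controller.enqueue(encoder.encode(`data: ${json}\n\n`));
      }
    })
  );
}

async function readBack(): Promise<void> {
  // Simulates the string stream Chrome's promptStreaming() would return.
  const chromeStream = new ReadableStream<string>({
    start(controller) {
      controller.enqueue('Hello');
      controller.enqueue(' world');
      controller.close();
    }
  });
  const reader = toSseBody(chromeStream).getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    // Logs: data: {"candidates":[{"content":{"role":"model","parts":[{"text":"Hello"}]}}]}
    console.log(decoder.decode(value));
  }
}

void readBack();
```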
From 1b9c98dc1570201d4e83416c0c7ce0f3f19b5853 Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 18 Apr 2025 14:57:41 -0700 Subject: [PATCH 19/22] Re-remove isChrome --- common/api-review/util.api.md | 5 ----- packages/util/src/environment.ts | 6 ------ 2 files changed, 11 deletions(-) diff --git a/common/api-review/util.api.md b/common/api-review/util.api.md index fb8afb2319c..8c62ff229ac 100644 --- a/common/api-review/util.api.md +++ b/common/api-review/util.api.md @@ -264,11 +264,6 @@ export function isBrowser(): boolean; // @public (undocumented) export function isBrowserExtension(): boolean; -// Warning: (ae-missing-release-tag) "isChrome" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public (undocumented) -export function isChrome(): boolean; - // Warning: (ae-missing-release-tag) "isCloudflareWorker" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public diff --git a/packages/util/src/environment.ts b/packages/util/src/environment.ts index 50d5f534106..a0467b08c59 100644 --- a/packages/util/src/environment.ts +++ b/packages/util/src/environment.ts @@ -173,12 +173,6 @@ export function isSafari(): boolean { ); } -export function isChrome(): boolean { - return ( - !isNode() && !!navigator.userAgent && navigator.userAgent.includes('Chrome') - ); -} - /** * This method checks if indexedDB is supported by current browser/service worker context * @return true if indexedDB is supported by current browser/service worker context From 5092bd8c7982ceaae0182aed427c9d30160b5b1c Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 18 Apr 2025 15:49:07 -0700 Subject: [PATCH 20/22] Re-remove dom-chromium-ai --- packages/vertexai/package.json | 1 - yarn.lock | 5 ----- 2 files changed, 6 deletions(-) diff --git a/packages/vertexai/package.json b/packages/vertexai/package.json index 076b6a1bc4a..9faf562a535 100644 --- a/packages/vertexai/package.json +++ b/packages/vertexai/package.json @@ -58,7 +58,6 @@ "devDependencies": { "@firebase/app": "0.11.4", "@rollup/plugin-json": "6.1.0", - "@types/dom-chromium-ai": "0.0.6", "rollup": "2.79.2", "rollup-plugin-replace": "2.2.0", "rollup-plugin-typescript2": "0.36.0", diff --git a/yarn.lock b/yarn.lock index fbfb49f23e0..d5ea91a7093 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2947,11 +2947,6 @@ resolved "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz#334311971d3a07121e7eb91b684a605e7eea9cbd" integrity sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw== -"@types/dom-chromium-ai@0.0.6": - version "0.0.6" - resolved "https://registry.npmjs.org/@types/dom-chromium-ai/-/dom-chromium-ai-0.0.6.tgz#0c9e5712d8db3d26586cd9f175001b509cd2e514" - integrity sha512-/jUGe9a3BLzsjjg18Olk/Ul64PZ0P4aw8uNxrXeXVTni5PSxyCfyhHb4UohsXNVByOnwYGzlqUcb3vYKVsG4mg== - "@types/eslint-scope@^3.7.7": version "3.7.7" resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" From 025b7866604287db45f32163326993aa241ee5fa Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 18 Apr 2025 16:51:22 -0700 Subject: [PATCH 21/22] Unit test stream method --- .../src/methods/chrome-adapter.test.ts | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/packages/vertexai/src/methods/chrome-adapter.test.ts b/packages/vertexai/src/methods/chrome-adapter.test.ts index b11fb9c937e..18673e0f0e0 100644 --- 
a/packages/vertexai/src/methods/chrome-adapter.test.ts +++ b/packages/vertexai/src/methods/chrome-adapter.test.ts @@ -30,6 +30,25 @@ import { GenerateContentRequest } from '../types'; use(sinonChai); use(chaiAsPromised); +/** + * Converts the ReadableStream from response.body to an array of strings. + */ +async function toStringArray( + stream: ReadableStream +): Promise { + const decoder = new TextDecoder(); + const actual = []; + const reader = stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + actual.push(decoder.decode(value)); + } + return actual; +} + describe('ChromeAdapter', () => { describe('isAvailable', () => { it('returns false if mode is only cloud', async () => { @@ -307,4 +326,52 @@ describe('ChromeAdapter', () => { }); }); }); + describe('generateContentStreamOnDevice', () => { + it('generates content stream', async () => { + const languageModelProvider = { + create: () => Promise.resolve({}) + } as LanguageModel; + const languageModel = { + promptStreaming: _i => new ReadableStream() + } as LanguageModel; + const createStub = stub(languageModelProvider, 'create').resolves( + languageModel + ); + const part = 'hi'; + const promptStub = stub(languageModel, 'promptStreaming').returns( + new ReadableStream({ + start(controller) { + controller.enqueue([part]); + controller.close(); + } + }) + ); + const onDeviceParams = {} as LanguageModelCreateOptions; + const adapter = new ChromeAdapter( + languageModelProvider, + 'prefer_on_device', + onDeviceParams + ); + const request = { + contents: [{ role: 'user', parts: [{ text: 'anything' }] }] + } as GenerateContentRequest; + const response = await adapter.generateContentStreamOnDevice(request); + expect(createStub).to.have.been.calledOnceWith(onDeviceParams); + expect(promptStub).to.have.been.calledOnceWith([ + { + role: request.contents[0].role, + content: [ + { + type: 'text', + content: request.contents[0].parts[0].text + } + ] + } + ]); + const actual = await toStringArray(response.body!); + expect(actual).to.deep.equal([ + `data: {"candidates":[{"content":{"role":"model","parts":[{"text":["${part}"]}]}}]}\n\n` + ]); + }); + }); }); From 34c658e52dd4e6b245d0638e893208db7f5be01e Mon Sep 17 00:00:00 2001 From: Erik Eldridge Date: Fri, 18 Apr 2025 16:52:25 -0700 Subject: [PATCH 22/22] Remove redundant ondevice suffix --- packages/vertexai/src/methods/chrome-adapter.test.ts | 4 ++-- packages/vertexai/src/methods/chrome-adapter.ts | 6 ++---- packages/vertexai/src/methods/generate-content.test.ts | 8 ++++---- packages/vertexai/src/methods/generate-content.ts | 4 ++-- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/packages/vertexai/src/methods/chrome-adapter.test.ts b/packages/vertexai/src/methods/chrome-adapter.test.ts index 18673e0f0e0..ed06ab9cb1d 100644 --- a/packages/vertexai/src/methods/chrome-adapter.test.ts +++ b/packages/vertexai/src/methods/chrome-adapter.test.ts @@ -299,7 +299,7 @@ describe('ChromeAdapter', () => { const request = { contents: [{ role: 'user', parts: [{ text: 'anything' }] }] } as GenerateContentRequest; - const response = await adapter.generateContentOnDevice(request); + const response = await adapter.generateContent(request); // Asserts initialization params are proxied. expect(createStub).to.have.been.calledOnceWith(onDeviceParams); // Asserts Vertex input type is mapped to Chrome type. 
@@ -355,7 +355,7 @@ describe('ChromeAdapter', () => { const request = { contents: [{ role: 'user', parts: [{ text: 'anything' }] }] } as GenerateContentRequest; - const response = await adapter.generateContentStreamOnDevice(request); + const response = await adapter.generateContentStream(request); expect(createStub).to.have.been.calledOnceWith(onDeviceParams); expect(promptStub).to.have.been.calledOnceWith([ { diff --git a/packages/vertexai/src/methods/chrome-adapter.ts b/packages/vertexai/src/methods/chrome-adapter.ts index 6c63c5a54f8..3279548676e 100644 --- a/packages/vertexai/src/methods/chrome-adapter.ts +++ b/packages/vertexai/src/methods/chrome-adapter.ts @@ -94,9 +94,7 @@ export class ChromeAdapter { * @param request a standard Vertex {@link GenerateContentRequest} * @returns {@link Response}, so we can reuse common response formatting. */ - async generateContentOnDevice( - request: GenerateContentRequest - ): Promise { + async generateContent(request: GenerateContentRequest): Promise { const session = await this.createSession( // TODO: normalize on-device params during construction. this.onDeviceParams || {} @@ -105,7 +103,7 @@ export class ChromeAdapter { const text = await session.prompt(messages); return ChromeAdapter.toResponse(text); } - async generateContentStreamOnDevice( + async generateContentStream( request: GenerateContentRequest ): Promise { const session = await this.createSession( diff --git a/packages/vertexai/src/methods/generate-content.test.ts b/packages/vertexai/src/methods/generate-content.test.ts index f714ec4d535..19c32941090 100644 --- a/packages/vertexai/src/methods/generate-content.test.ts +++ b/packages/vertexai/src/methods/generate-content.test.ts @@ -308,6 +308,7 @@ describe('generateContent()', () => { ); expect(mockFetch).to.be.called; }); + // TODO: define a similar test for generateContentStream it('on-device', async () => { const chromeAdapter = new ChromeAdapter(); const isAvailableStub = stub(chromeAdapter, 'isAvailable').resolves(true); @@ -315,10 +316,9 @@ describe('generateContent()', () => { 'vertexAI', 'unary-success-basic-reply-short.json' ); - const generateContentStub = stub( - chromeAdapter, - 'generateContentOnDevice' - ).resolves(mockResponse as Response); + const generateContentStub = stub(chromeAdapter, 'generateContent').resolves( + mockResponse as Response + ); const result = await generateContent( fakeApiSettings, 'model', diff --git a/packages/vertexai/src/methods/generate-content.ts b/packages/vertexai/src/methods/generate-content.ts index a6343bcc3a8..1dc5918516e 100644 --- a/packages/vertexai/src/methods/generate-content.ts +++ b/packages/vertexai/src/methods/generate-content.ts @@ -53,7 +53,7 @@ export async function generateContentStream( ): Promise { let response; if (await chromeAdapter.isAvailable(params)) { - response = await chromeAdapter.generateContentStreamOnDevice(params); + response = await chromeAdapter.generateContentStream(params); } else { response = await generateContentStreamOnCloud( apiSettings, @@ -90,7 +90,7 @@ export async function generateContent( ): Promise { let response; if (await chromeAdapter.isAvailable(params)) { - response = await chromeAdapter.generateContentOnDevice(params); + response = await chromeAdapter.generateContent(params); } else { response = await generateContentOnCloud( apiSettings,
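After the renames in patch 22, both the unary and streaming entry points share the prefer-on-device dispatch shown in generate-content.ts: ask the adapter whether it can serve the request, and fall back to the cloud request otherwise. The sketch below restates that control flow in isolation; AdapterLike, hybridGenerate, and runOnCloud are illustrative stand-ins rather than the SDK's exported API.

```ts
// Standalone restatement of the dispatch in generate-content.ts after the
// renames in patch 22. AdapterLike, hybridGenerate and runOnCloud are
// illustrative stand-ins; only the control flow mirrors the patch.
interface AdapterLike {
  isAvailable(request: unknown): Promise<boolean>;
  generateContent(request: unknown): Promise<Response>;
}

async function hybridGenerate(
  adapter: AdapterLike,
  request: unknown,
  runOnCloud: (request: unknown) => Promise<Response>
): Promise<Response> {
  // Prefer the on-device model when the adapter reports it can serve this
  // request; otherwise fall back to the existing cloud code path.
  if (await adapter.isAvailable(request)) {
    return adapter.generateContent(request);
  }
  return runOnCloud(request);
}

// An adapter stub that always reports availability and returns the emulated
// Response shape produced by ChromeAdapter.toResponse, so the response
// handling shared with the cloud path sees the same structure.
const stubAdapter: AdapterLike = {
  isAvailable: async () => true,
  generateContent: async () =>
    ({
      json: async () => ({
        candidates: [{ content: { parts: [{ text: 'on-device reply' }] } }]
      })
    } as Response)
};

void hybridGenerate(stubAdapter, { contents: [] }, async () => {
  throw new Error('cloud fallback is not exercised in this sketch');
}).then(async response => console.log(await response.json()));
```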