Skip to content

Commit cb2716c

Browse files
authored
chore: bump gemini flash versions in samples (#3909)
1 parent b6f7375 commit cb2716c

File tree

16 files changed

+34
-35
lines changed

16 files changed

+34
-35
lines changed

js/doc-snippets/src/flows/index.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ export const menuSuggestionFlow = ai.defineFlow(
2929
},
3030
async (restaurantTheme) => {
3131
const { text } = await ai.generate({
32-
model: googleAI.model('gemini-2.0-flash'),
32+
model: googleAI.model('gemini-2.5-flash'),
3333
prompt: `Invent a menu item for a ${restaurantTheme} themed restaurant.`,
3434
});
3535
return text;
@@ -51,7 +51,7 @@ export const menuSuggestionFlowWithSchema = ai.defineFlow(
5151
},
5252
async (restaurantTheme) => {
5353
const { output } = await ai.generate({
54-
model: googleAI.model('gemini-2.0-flash'),
54+
model: googleAI.model('gemini-2.5-flash'),
5555
prompt: `Invent a menu item for a ${restaurantTheme} themed restaurant.`,
5656
output: { schema: MenuItemSchema },
5757
});
@@ -72,7 +72,7 @@ export const menuSuggestionFlowMarkdown = ai.defineFlow(
7272
},
7373
async (restaurantTheme) => {
7474
const { output } = await ai.generate({
75-
model: googleAI.model('gemini-2.0-flash'),
75+
model: googleAI.model('gemini-2.5-flash'),
7676
prompt: `Invent a menu item for a ${restaurantTheme} themed restaurant.`,
7777
output: { schema: MenuItemSchema },
7878
});
@@ -94,7 +94,7 @@ export const menuSuggestionStreamingFlow = ai.defineFlow(
9494
},
9595
async (restaurantTheme, { sendChunk }) => {
9696
const response = await ai.generateStream({
97-
model: googleAI.model('gemini-2.0-flash'),
97+
model: googleAI.model('gemini-2.5-flash'),
9898
prompt: `Invent a menu item for a ${restaurantTheme} themed restaurant.`,
9999
});
100100

@@ -128,7 +128,7 @@ export const complexMenuSuggestionFlow = ai.defineFlow(
128128
outputSchema: PrixFixeMenuSchema,
129129
},
130130
async (theme: string): Promise<z.infer<typeof PrixFixeMenuSchema>> => {
131-
const chat = ai.chat({ model: googleAI.model('gemini-2.0-flash') });
131+
const chat = ai.chat({ model: googleAI.model('gemini-2.5-flash') });
132132
await chat.send('What makes a good prix fixe menu?');
133133
await chat.send(
134134
'What are some ingredients, seasonings, and cooking techniques that ' +
@@ -178,7 +178,7 @@ Today's menu
178178
}
179179
);
180180
const { text } = await ai.generate({
181-
model: googleAI.model('gemini-2.0-flash'),
181+
model: googleAI.model('gemini-2.5-flash'),
182182
system: "Help the user answer questions about today's menu.",
183183
prompt: input,
184184
docs: [{ content: [{ text: menu }] }],

js/doc-snippets/src/index.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ import { genkit } from 'genkit';
1919

2020
const ai = genkit({
2121
plugins: [googleAI()],
22-
model: googleAI.model('gemini-2.0-flash'),
22+
model: googleAI.model('gemini-2.5-flash'),
2323
});
2424

2525
async function main() {

js/doc-snippets/src/models/index.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,13 @@ import { genkit } from 'genkit';
1919

2020
const ai = genkit({
2121
plugins: [googleAI()],
22-
model: googleAI.model('gemini-2.0-flash'),
22+
model: googleAI.model('gemini-2.5-flash'),
2323
});
2424

2525
async function fn01() {
2626
// [START ex01]
2727
const { text } = await ai.generate({
28-
model: googleAI.model('gemini-2.0-flash'),
28+
model: googleAI.model('gemini-2.5-flash'),
2929
prompt: 'Invent a menu item for a pirate themed restaurant.',
3030
});
3131
// [END ex01]
@@ -34,7 +34,7 @@ async function fn01() {
3434
async function fn02() {
3535
// [START ex02]
3636
const { text } = await ai.generate({
37-
model: 'googleai/gemini-2.0-flash-001',
37+
model: 'googleai/gemini-2.5-flash',
3838
prompt: 'Invent a menu item for a pirate themed restaurant.',
3939
});
4040
// [END ex02]

js/doc-snippets/src/models/minimal.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ import { genkit } from 'genkit';
2020

2121
const ai = genkit({
2222
plugins: [googleAI()],
23-
model: googleAI.model('gemini-2.0-flash'),
23+
model: googleAI.model('gemini-2.5-flash'),
2424
});
2525

2626
async function main() {

js/testapps/dev-ui-gallery/prompts/code.prompt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
---
2-
model: googleai/gemini-2.0-flash
2+
model: googleai/gemini-2.5-flash
33
config:
44
temperature: 0.4
55
safetySettings:

js/testapps/dev-ui-gallery/prompts/markdown.prompt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
---
2-
model: googleai/gemini-2.0-flash
2+
model: googleai/gemini-2.5-flash
33
config:
44
temperature: 0.4
55
safetySettings:

js/testapps/flow-simple-ai/prompts/dotpromptContext.prompt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
---
2-
model: vertexai/gemini-2.0-flash
2+
model: vertexai/gemini-2.5-flash
33
input:
44
schema:
55
question: string

js/testapps/flow-simple-ai/src/index.ts

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ export const streamFlowVertex = ai.defineFlow(
151151
},
152152
async (prompt, { sendChunk }) => {
153153
const { response, stream } = ai.generateStream({
154-
model: vertexAI.model('gemini-2.0-flash-001', { temperature: 0.77 }),
154+
model: vertexAI.model('gemini-2.5-flash', { temperature: 0.77 }),
155155
prompt,
156156
});
157157

@@ -171,7 +171,7 @@ export const streamFlowGemini = ai.defineFlow(
171171
},
172172
async (prompt, { sendChunk }) => {
173173
const { response, stream } = ai.generateStream({
174-
model: googleAI.model('gemini-2.0-flash-001', { temperature: 0.77 }),
174+
model: googleAI.model('gemini-2.5-flash', { temperature: 0.77 }),
175175
prompt,
176176
});
177177

@@ -207,7 +207,7 @@ export const streamJsonFlow = ai.defineFlow(
207207
},
208208
async (count, { sendChunk }) => {
209209
const { response, stream } = ai.generateStream({
210-
model: googleAI.model('gemini-2.0-flash'),
210+
model: googleAI.model('gemini-2.5-flash'),
211211
output: {
212212
schema: GameCharactersSchema,
213213
},
@@ -506,7 +506,7 @@ export const dynamicToolCaller = ai.defineFlow(
506506
);
507507

508508
const { response, stream } = ai.generateStream({
509-
model: googleAI.model('gemini-2.0-flash'),
509+
model: googleAI.model('gemini-2.5-flash'),
510510
config: {
511511
temperature: 1,
512512
},
@@ -901,7 +901,7 @@ ai.defineFlow(
901901
},
902902
async ({ url, prompt, model }) => {
903903
const { text } = await ai.generate({
904-
model: model || 'googleai/gemini-2.0-flash',
904+
model: model || 'googleai/gemini-2.5-flash',
905905
prompt: [{ text: prompt }, { media: { url, contentType: 'video/mp4' } }],
906906
});
907907
return text;
@@ -1112,7 +1112,7 @@ async function saveWaveFile(
11121112

11131113
ai.defineFlow('googleSearch', async (thing) => {
11141114
const { text } = await ai.generate({
1115-
model: googleAI.model('gemini-2.0-flash'),
1115+
model: googleAI.model('gemini-2.5-flash'),
11161116
prompt: `What is a banana?`,
11171117
config: { tools: [{ googleSearch: {} }] },
11181118
});
@@ -1122,7 +1122,7 @@ ai.defineFlow('googleSearch', async (thing) => {
11221122

11231123
ai.defineFlow('googleSearchRetrieval', async (thing) => {
11241124
const { text } = await ai.generate({
1125-
model: vertexAI.model('gemini-2.0-flash'),
1125+
model: vertexAI.model('gemini-2.5-flash'),
11261126
prompt: `What is a banana?`,
11271127
config: { googleSearchRetrieval: {} },
11281128
});
@@ -1144,7 +1144,7 @@ ai.defineFlow('googleai-imagen', async (thing) => {
11441144

11451145
ai.defineFlow('meme-of-the-day', async () => {
11461146
const { text: script } = await ai.generate({
1147-
model: googleAI.model('gemini-2.0-flash'),
1147+
model: googleAI.model('gemini-2.5-flash'),
11481148
prompt:
11491149
'Write a detailed script for a 8 second video. The video should be a meme of the day. ' +
11501150
'A Silly DIY FAIL situation like a: broken tools, or bad weather or crooked assembly, etc. Be creative. The FAIL should be very obvious. ' +
@@ -1266,7 +1266,7 @@ ai.defineResource(
12661266

12671267
ai.defineFlow('resource', async () => {
12681268
return await ai.generate({
1269-
model: googleAI.model('gemini-2.0-flash'),
1269+
model: googleAI.model('gemini-2.5-flash'),
12701270
prompt: [
12711271
{ text: 'analyze this: ' },
12721272
{ resource: { uri: 'my://resource/value' } },

js/testapps/format-tester/src/index.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,6 @@ if (!models.length) {
153153
'vertexai/gemini-2.5-flash',
154154
'googleai/gemini-2.5-pro',
155155
'googleai/gemini-2.5-flash',
156-
'googleai/gemini-2.0-flash',
157156
];
158157
}
159158

js/testapps/multiagents-demo/src/agents/representativeAgent.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ import { getStoreInfoTool } from '../tools';
2020
// Simple agent for providing store information
2121
export const agent = ai.definePrompt({
2222
name: 'representativeAgent',
23-
model: 'googleai/gemini-2.0-flash',
23+
model: 'googleai/gemini-2.5-flash',
2424
description: 'Representative Agent can provide store info',
2525
tools: [getStoreInfoTool],
2626
system: `You are a customer service representative for TechStore Computer Shop.

0 commit comments

Comments (0)