Create a chat session with a Generative Model

This sample demonstrates how to use Generative Models to create a chat session.

Code sample

Go

Before trying this sample, follow the Go setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Go API reference documentation.

To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

  import 
  
 ( 
  
 "context" 
  
 "fmt" 
  
 "io" 
  
 "google.golang.org/genai" 
 ) 
 // generateChatWithText shows how to generate chat using a text prompt. 
 func 
  
 generateChatWithText 
 ( 
 w 
  
 io 
 . 
 Writer 
 ) 
  
 error 
  
 { 
  
 ctx 
  
 := 
  
 context 
 . 
 Background 
 () 
  
 client 
 , 
  
 err 
  
 := 
  
 genai 
 . 
 NewClient 
 ( 
 ctx 
 , 
  
& genai 
 . 
 ClientConfig 
 { 
  
 HTTPOptions 
 : 
  
 genai 
 . 
 HTTPOptions 
 { 
 APIVersion 
 : 
  
 "v1" 
 }, 
  
 }) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 fmt 
 . 
 Errorf 
 ( 
 "failed to create genai client: %w" 
 , 
  
 err 
 ) 
  
 } 
  
 modelName 
  
 := 
  
 "gemini-2.5-flash" 
  
 history 
  
 := 
  
 [] 
 * 
 genai 
 . 
 Content 
 { 
  
 { 
  
 Role 
 : 
  
 "user" 
 , 
  
 Parts 
 : 
  
 [] 
 * 
 genai 
 . 
 Part 
 { 
  
 { 
 Text 
 : 
  
 "Hello there" 
 }, 
  
 }, 
  
 }, 
  
 { 
  
 Role 
 : 
  
 "model" 
 , 
  
 Parts 
 : 
  
 [] 
 * 
 genai 
 . 
 Part 
 { 
  
 { 
 Text 
 : 
  
 "Great to meet you. What would you like to know?" 
 }, 
  
 }, 
  
 }, 
  
 } 
  
 chatSession 
 , 
  
 err 
  
 := 
  
 client 
 . 
 Chats 
 . 
 Create 
 ( 
 ctx 
 , 
  
 modelName 
 , 
  
 nil 
 , 
  
 history 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 fmt 
 . 
 Errorf 
 ( 
 "failed to create genai chat session: %w" 
 , 
  
 err 
 ) 
  
 } 
  
 contents 
  
 := 
  
 genai 
 . 
 Part 
 { 
 Text 
 : 
  
 "Tell me a story." 
 } 
  
 resp 
 , 
  
 err 
  
 := 
  
 chatSession 
 . 
 SendMessage 
 ( 
 ctx 
 , 
  
 contents 
 ) 
  
 if 
  
 err 
  
 != 
  
 nil 
  
 { 
  
 return 
  
 fmt 
 . 
 Errorf 
 ( 
 "failed to send message: %w" 
 , 
  
 err 
 ) 
  
 } 
  
 respText 
  
 := 
  
 resp 
 . 
 Text 
 () 
  
 fmt 
 . 
 Fprintln 
 ( 
 w 
 , 
  
 respText 
 ) 
  
 // Example response: 
  
 // Okay, settle in. Let me tell you a story about a quiet cartographer, but not of lands and seas. 
  
 // ... 
  
 // In the sleepy town of Oakhaven, nestled between the Whispering Hills and the Murmuring River, lived a woman named Elara. 
  
 // ... 
  
 return 
  
 nil 
 } 
 

Node.js

Before trying this sample, follow the Node.js setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Node.js API reference documentation.

To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

  const 
  
 { 
 GoogleGenAI 
 } 
  
 = 
  
 require 
 ( 
 '@google/genai' 
 ); 
 const 
  
 GOOGLE_CLOUD_PROJECT 
  
 = 
  
 process 
 . 
 env 
 . 
 GOOGLE_CLOUD_PROJECT 
 ; 
 const 
  
 GOOGLE_CLOUD_LOCATION 
  
 = 
  
 process 
 . 
 env 
 . 
 GOOGLE_CLOUD_LOCATION 
  
 || 
  
 'global' 
 ; 
 async 
  
 function 
  
 generateText 
 ( 
  
 projectId 
  
 = 
  
 GOOGLE_CLOUD_PROJECT 
 , 
  
 location 
  
 = 
  
 GOOGLE_CLOUD_LOCATION 
 ) 
  
 { 
  
 const 
  
 client 
  
 = 
  
 new 
  
 GoogleGenAI 
 ({ 
  
 vertexai 
 : 
  
 true 
 , 
  
 project 
 : 
  
 projectId 
 , 
  
 location 
 : 
  
 location 
 , 
  
 }); 
  
 const 
  
 chatSession 
  
 = 
  
 client 
 . 
 chats 
 . 
 create 
 ({ 
  
 model 
 : 
  
 'gemini-2.5-flash' 
 , 
  
 history 
 : 
  
 [ 
  
 { 
  
 role 
 : 
  
 'user' 
 , 
  
 parts 
 : 
  
 [{ 
 text 
 : 
  
 'Hello' 
 }], 
  
 }, 
  
 { 
  
 role 
 : 
  
 'model' 
 , 
  
 parts 
 : 
  
 [{ 
 text 
 : 
  
 'Great to meet you. What would you like to know?' 
 }], 
  
 }, 
  
 ], 
  
 }); 
  
 const 
  
 response 
  
 = 
  
 await 
  
 chatSession 
 . 
 sendMessage 
 ({ 
 message 
 : 
  
 'Tell me a story.' 
 }); 
  
 console 
 . 
 log 
 ( 
 response 
 . 
 text 
 ); 
  
 // Example response: 
  
 // Okay, here's a story for you: 
  
 // ... 
  
 return 
  
 response 
 . 
 text 
 ; 
 } 
 

Python

Before trying this sample, follow the Python setup instructions in the Vertex AI quickstart using client libraries. For more information, see the Vertex AI Python API reference documentation.

To authenticate to Vertex AI, set up Application Default Credentials. For more information, see Set up authentication for a local development environment.

  from 
  
 google 
  
 import 
 genai 
 from 
  
 google.genai.types 
  
 import 
 HttpOptions 
 , 
 ModelContent 
 , 
 Part 
 , 
 UserContent 
 client 
 = 
 genai 
 . 
 Client 
 ( 
 http_options 
 = 
 HttpOptions 
 ( 
 api_version 
 = 
 "v1" 
 )) 
 chat_session 
 = 
 client 
 . 
 chats 
 . 
 create 
 ( 
 model 
 = 
 "gemini-2.5-flash" 
 , 
 history 
 = 
 [ 
 UserContent 
 ( 
 parts 
 = 
 [ 
 Part 
 ( 
 text 
 = 
 "Hello" 
 )]), 
 ModelContent 
 ( 
 parts 
 = 
 [ 
 Part 
 ( 
 text 
 = 
 "Great to meet you. What would you like to know?" 
 )], 
 ), 
 ], 
 ) 
 response 
 = 
 chat_session 
 . 
 send_message 
 ( 
 "Tell me a story." 
 ) 
 print 
 ( 
 response 
 . 
 text 
 ) 
 # Example response: 
 # Okay, here's a story for you: 
 # ... 
 

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.

Create a Mobile Website
View Site in Mobile | Classic
Share by: