Method: projects.locations.cachedContents.create

Creates cached content. This call initializes the cached content in data storage, and users pay for the cached data storage.

Endpoint

POST https://{endpoint}/v1beta1/{parent}/cachedContents

Where {endpoint} is one of the supported service endpoints.

Path parameters

parent string

Required. The parent resource where the cached content will be created.

Request body

The request body contains an instance of CachedContent.
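The same request can also be issued directly against the REST endpoint. The sketch below is illustrative only, not an official sample: it assumes Application Default Credentials obtained through the google-auth library, the requests package for HTTP, and placeholder project and model values; the body fields mirror the CachedContent used in the SDK examples that follow.

import google.auth
import google.auth.transport.requests
import requests

# Hypothetical placeholders -- substitute your own project and region.
PROJECT_ID = "your-project-id"
LOCATION = "us-central1"

# Obtain an access token via Application Default Credentials (assumed to be configured).
credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
credentials.refresh(google.auth.transport.requests.Request())

parent = f"projects/{PROJECT_ID}/locations/{LOCATION}"
url = f"https://{LOCATION}-aiplatform.googleapis.com/v1beta1/{parent}/cachedContents"

# Request body: a CachedContent instance expressed as JSON.
body = {
    "model": f"{parent}/publishers/google/models/gemini-1.5-pro-002",
    "systemInstruction": {
        "parts": [{
            "text": (
                "You are an expert researcher. You always stick to the facts in the "
                "sources provided, and never make up new facts. Now look at these "
                "research papers, and answer the following questions."
            )
        }]
    },
    "contents": [{
        "role": "user",
        "parts": [
            {"fileData": {"mimeType": "application/pdf",
                          "fileUri": "gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf"}},
            {"fileData": {"mimeType": "application/pdf",
                          "fileUri": "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"}},
        ],
    }],
    "ttl": "3600s",  # 60 minutes, in the Duration JSON format
}

response = requests.post(
    url,
    headers={"Authorization": f"Bearer {credentials.token}"},
    json=body,
)
print(response.json())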

Example request

C#

using Google.Cloud.AIPlatform.V1Beta1;
using Google.Protobuf.WellKnownTypes;
using System;
using System.Threading.Tasks;

public class CreateContextCache
{
    public async Task<CachedContentName> Create(string projectId)
    {
        var client = await new GenAiCacheServiceClientBuilder
        {
            Endpoint = "us-central1-aiplatform.googleapis.com"
        }.BuildAsync();

        var request = new CreateCachedContentRequest
        {
            Parent = $"projects/{projectId}/locations/us-central1",
            CachedContent = new CachedContent
            {
                Model = $"projects/{projectId}/locations/us-central1/publishers/google/models/gemini-1.5-pro-001",
                SystemInstruction = new Content
                {
                    Parts =
                    {
                        new Part
                        {
                            Text = "You are an expert researcher. You always stick to the facts in the sources provided and"
                                + " never make up new facts. Now look at these research papers, and answer the following questions."
                        }
                    }
                },
                Contents =
                {
                    new Content
                    {
                        Role = "USER",
                        Parts =
                        {
                            new Part
                            {
                                FileData = new()
                                {
                                    MimeType = "application/pdf",
                                    FileUri = "gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf"
                                }
                            },
                            new Part
                            {
                                FileData = new()
                                {
                                    MimeType = "application/pdf",
                                    FileUri = "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"
                                }
                            }
                        }
                    }
                },
                Ttl = Duration.FromTimeSpan(TimeSpan.FromMinutes(60))
            }
        };

        var cachedContent = await client.CreateCachedContentAsync(request);
        Console.WriteLine($"Created cache: {cachedContent.CachedContentName}");
        return cachedContent.CachedContentName;
    }
}

Go

import (
    "context"
    "fmt"
    "io"
    "time"

    "cloud.google.com/go/vertexai/genai"
)

// createContextCache shows how to create a cached content, and returns its name.
func createContextCache(w io.Writer, projectID, location, modelName string) (string, error) {
    // location := "us-central1"
    // modelName := "gemini-1.5-pro-001"
    ctx := context.Background()

    systemInstruction := `
You are an expert researcher. You always stick to the facts in the sources provided, and never make up new facts.
Now look at these research papers, and answer the following questions.
`

    client, err := genai.NewClient(ctx, projectID, location)
    if err != nil {
        return "", fmt.Errorf("unable to create client: %w", err)
    }
    defer client.Close()

    // These PDFs are viewable at
    //   https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf
    //   https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/2403.05530.pdf
    part1 := genai.FileData{
        MIMEType: "application/pdf",
        FileURI:  "gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf",
    }
    part2 := genai.FileData{
        MIMEType: "application/pdf",
        FileURI:  "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf",
    }

    content := &genai.CachedContent{
        Model: modelName,
        SystemInstruction: &genai.Content{
            Parts: []genai.Part{genai.Text(systemInstruction)},
        },
        Expiration: genai.ExpireTimeOrTTL{TTL: 60 * time.Minute},
        Contents: []*genai.Content{
            {
                Role:  "user",
                Parts: []genai.Part{part1, part2},
            },
        },
    }

    result, err := client.CreateCachedContent(ctx, content)
    if err != nil {
        return "", fmt.Errorf("CreateCachedContent: %w", err)
    }
    fmt.Fprint(w, result.Name)
    return result.Name, nil
}

Python

import vertexai
import datetime

from vertexai.generative_models import Part
from vertexai.preview import caching

# TODO(developer): Update and un-comment below line
# PROJECT_ID = "your-project-id"
vertexai.init(project=PROJECT_ID, location="us-central1")

system_instruction = """
You are an expert researcher. You always stick to the facts in the sources provided, and never make up new facts.
Now look at these research papers, and answer the following questions.
"""

contents = [
    Part.from_uri(
        "gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf",
        mime_type="application/pdf",
    ),
    Part.from_uri(
        "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf",
        mime_type="application/pdf",
    ),
]

cached_content = caching.CachedContent.create(
    model_name="gemini-1.5-pro-002",
    system_instruction=system_instruction,
    contents=contents,
    ttl=datetime.timedelta(minutes=60),
    display_name="example-cache",
)

print(cached_content.name)
# Example response:
# 1234567890

Response body

If successful, the response body contains a newly created instance of CachedContent.
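The name field of the returned CachedContent identifies the cache in later requests. The following is a brief usage sketch, assuming the vertexai preview SDK shown above (including its CachedContent lookup and GenerativeModel.from_cached_content helper); the project and cache names are placeholders.

import vertexai
from vertexai.preview import caching
from vertexai.preview.generative_models import GenerativeModel

# Hypothetical placeholders.
vertexai.init(project="your-project-id", location="us-central1")

# Reference an existing cache by the name returned in the create response.
cached_content = caching.CachedContent(
    cached_content_name="projects/your-project-id/locations/us-central1/cachedContents/1234567890"
)

# Serve generation requests from the cached content.
model = GenerativeModel.from_cached_content(cached_content=cached_content)
response = model.generate_content("What are these papers about?")
print(response.text)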