Copy an archived generation of an object in a Cloud Storage bucket.
Code sample
C++
For more information, see the Cloud Storage C++ API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
namespace gcs = ::google::cloud::storage;
using ::google::cloud::StatusOr;
[](gcs::Client client, std::string const& source_bucket_name,
   std::string const& source_object_name,
   std::string const& destination_bucket_name,
   std::string const& destination_object_name,
   std::int64_t source_object_generation) {
  StatusOr<gcs::ObjectMetadata> copy = client.CopyObject(
      source_bucket_name, source_object_name, destination_bucket_name,
      destination_object_name,
      gcs::SourceGeneration{source_object_generation});
  if (!copy) throw std::move(copy).status();

  std::cout << "Successfully copied " << source_object_name << " generation "
            << source_object_generation << " in bucket " << source_bucket_name
            << " to bucket " << copy->bucket() << " with name " << copy->name()
            << ".\nThe full metadata after the copy is: " << *copy << "\n";
}
C#
For more information, see the Cloud Storage C# API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
using Google.Cloud.Storage.V1;
using System;

public class CopyFileArchivedGenerationSample
{
    public Google.Apis.Storage.v1.Data.Object CopyFileArchivedGeneration(
        string sourceBucketName = "source-bucket-name",
        string sourceObjectName = "source-file",
        string destBucketName = "destination-bucket-name",
        string destObjectName = "destination-file-name",
        long? generation = 1579287380533984)
    {
        var storage = StorageClient.Create();
        var copyOptions = new CopyObjectOptions
        {
            SourceGeneration = generation
        };
        var copiedFile = storage.CopyObject(
            sourceBucketName, sourceObjectName, destBucketName, destObjectName, copyOptions);

        Console.WriteLine(
            $"Generation {generation} of the object {sourceBucketName}/{sourceObjectName} " +
            $"was copied to {destBucketName}/{destObjectName}.");

        return copiedFile;
    }
}
Go
For more information, see the Cloud Storage Go API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
import (
	"context"
	"fmt"
	"io"
	"time"

	"cloud.google.com/go/storage"
)

// copyOldVersionOfObject copies a noncurrent version of an object.
func copyOldVersionOfObject(w io.Writer, bucket, srcObject, dstObject string, gen int64) error {
	// bucket := "bucket-name"
	// srcObject := "source-object-name"
	// dstObject := "destination-object-name"
	// gen is the generation of srcObject to copy.
	// gen := 1587012235914578
	ctx := context.Background()
	client, err := storage.NewClient(ctx)
	if err != nil {
		return fmt.Errorf("storage.NewClient: %w", err)
	}
	defer client.Close()

	ctx, cancel := context.WithTimeout(ctx, time.Second*10)
	defer cancel()

	src := client.Bucket(bucket).Object(srcObject)
	dst := client.Bucket(bucket).Object(dstObject)

	// Optional: set a generation-match precondition to avoid potential race
	// conditions and data corruptions. The request to copy is aborted if the
	// object's generation number does not match your precondition.
	// For a dst object that does not yet exist, set the DoesNotExist precondition.
	dst = dst.If(storage.Conditions{DoesNotExist: true})
	// If the destination object already exists in your bucket, set instead a
	// generation-match precondition using its generation number.
	// attrs, err := dst.Attrs(ctx)
	// if err != nil {
	// 	return fmt.Errorf("object.Attrs: %w", err)
	// }
	// dst = dst.If(storage.Conditions{GenerationMatch: attrs.Generation})

	if _, err := dst.CopierFrom(src.Generation(gen)).Run(ctx); err != nil {
		return fmt.Errorf("Object(%q).CopierFrom(%q).Generation(%v).Run: %w", dstObject, srcObject, gen, err)
	}
	fmt.Fprintf(w, "Generation %v of object %v in bucket %v was copied to %v\n", gen, srcObject, bucket, dstObject)
	return nil
}
Java
For more information, see the Cloud Storage Java API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

public class CopyOldVersionOfObject {
  public static void copyOldVersionOfObject(
      String projectId,
      String bucketName,
      String objectToCopy,
      long generationToCopy,
      String newObjectName) {
    // The ID of your GCP project
    // String projectId = "your-project-id";

    // The ID of your GCS bucket
    // String bucketName = "your-unique-bucket-name";

    // The ID of the GCS object to copy an old version of
    // String objectToCopy = "your-object-name";

    // The generation of objectToCopy to copy
    // long generationToCopy = 1579287380533984;

    // What to name the new object with the old data from objectToCopy
    // String newObjectName = "your-new-object";

    Storage storage = StorageOptions.newBuilder().setProjectId(projectId).build().getService();

    // Optional: set a generation-match precondition to avoid potential race
    // conditions and data corruptions. The request returns a 412 error if the
    // preconditions are not met.
    Storage.BlobTargetOption precondition;
    if (storage.get(bucketName, newObjectName) == null) {
      // For a target object that does not yet exist, set the DoesNotExist precondition.
      // This will cause the request to fail if the object is created before the request runs.
      precondition = Storage.BlobTargetOption.doesNotExist();
    } else {
      // If the destination already exists in your bucket, instead set a generation-match
      // precondition. This will cause the request to fail if the existing object's generation
      // changes before the request runs.
      precondition =
          Storage.BlobTargetOption.generationMatch(
              storage.get(bucketName, newObjectName).getGeneration());
    }

    Storage.CopyRequest copyRequest =
        Storage.CopyRequest.newBuilder()
            .setSource(BlobId.of(bucketName, objectToCopy, generationToCopy))
            .setTarget(BlobId.of(bucketName, newObjectName), precondition)
            .build();
    storage.copy(copyRequest);

    System.out.println(
        "Generation "
            + generationToCopy
            + " of object "
            + objectToCopy
            + " in bucket "
            + bucketName
            + " was copied to "
            + newObjectName);
  }
}
Node.js
For more information, see the Cloud Storage Node.js API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// The ID of your GCS bucket
// const srcBucketName = "your-unique-bucket-name";

// The ID of the GCS file to copy an old version of
// const srcFilename = "your-file-name";

// The generation of fileToCopy to copy
// const generation = 1579287380533984;

// The ID of the bucket to copy the file to
// const destBucketName = 'target-file-bucket';

// What to name the new file with the old data from srcFilename
// const destFileName = "your-new-file";

// The generation-match precondition for the destination object: use 0 if it
// does not exist yet, or its current generation number if it does.
// const destinationGenerationMatchPrecondition = 0;

// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');

// Creates a client
const storage = new Storage();

async function copyOldVersionOfFile() {
  // Copies the file to the other bucket

  // Optional:
  // Set a generation-match precondition to avoid potential race conditions
  // and data corruptions. The request to copy is aborted if the object's
  // generation number does not match your precondition. For a destination
  // object that does not yet exist, set the ifGenerationMatch precondition to 0.
  // If the destination object already exists in your bucket, set instead a
  // generation-match precondition using its generation number.
  const copyOptions = {
    preconditionOpts: {
      ifGenerationMatch: destinationGenerationMatchPrecondition,
    },
  };

  await storage
    .bucket(srcBucketName)
    .file(srcFilename, {
      generation,
    })
    .copy(storage.bucket(destBucketName).file(destFileName), copyOptions);

  console.log(
    `Generation ${generation} of file ${srcFilename} in bucket ${srcBucketName} was copied to ${destFileName} in bucket ${destBucketName}`
  );
}

copyOldVersionOfFile().catch(console.error);
PHP
For more information, see the Cloud Storage PHP API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
use Google\Cloud\Storage\StorageClient;
/**
* Copy archived generation of a given object to a new object.
*
* @param string $bucketName The name of your Cloud Storage bucket.
* (e.g. 'my-bucket')
* @param string $objectToCopy The name of the object to copy.
* (e.g. 'my-object')
* @param string $generationToCopy The generation of the object to copy.
* (e.g. 1579287380533984)
* @param string $newObjectName The name of the target object.
* (e.g. 'my-object-1579287380533984')
*/
function copy_file_archived_generation(string $bucketName, string $objectToCopy, string $generationToCopy, string $newObjectName): void
{
$storage = new StorageClient();
$bucket = $storage->bucket($bucketName);
$object = $bucket->object($objectToCopy, [
'generation' => $generationToCopy,
]);
$object->copy($bucket, [
'name' => $newObjectName,
]);
printf(
'Generation %s of object %s in bucket %s was copied to %s',
$generationToCopy,
$objectToCopy,
$bucketName,
$newObjectName
);
}
Python
For more information, see the Cloud Storage Python API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
from google.cloud import storage


def copy_file_archived_generation(
    bucket_name, blob_name, destination_bucket_name, destination_blob_name, generation
):
    """Copies a blob from one bucket to another with a new name with the same generation."""
    # bucket_name = "your-bucket-name"
    # blob_name = "your-object-name"
    # destination_bucket_name = "destination-bucket-name"
    # destination_blob_name = "destination-object-name"
    # generation = 1579287380533984

    storage_client = storage.Client()

    source_bucket = storage_client.bucket(bucket_name)
    source_blob = source_bucket.blob(blob_name)
    destination_bucket = storage_client.bucket(destination_bucket_name)

    # Optional: set a generation-match precondition to avoid potential race conditions
    # and data corruptions. The request to copy is aborted if the object's
    # generation number does not match your precondition. For a destination
    # object that does not yet exist, set the if_generation_match precondition to 0.
    # If the destination object already exists in your bucket, set instead a
    # generation-match precondition using its generation number.
    destination_generation_match_precondition = 0

    # source_generation selects a specific revision of the source object, as opposed to the latest version.
    blob_copy = source_bucket.copy_blob(
        source_blob,
        destination_bucket,
        destination_blob_name,
        source_generation=generation,
        if_generation_match=destination_generation_match_precondition,
    )

    print(
        "Generation {} of the blob {} in bucket {} copied to blob {} in bucket {}.".format(
            generation,
            source_blob.name,
            source_bucket.name,
            blob_copy.name,
            destination_bucket.name,
        )
    )
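As a quick illustration (not part of the official sample), the Python function above might be invoked as follows; the bucket name, object names, and the exact generation value are placeholders to replace with your own:

# Hypothetical invocation of copy_file_archived_generation; the bucket and
# object names are placeholders, and the generation must be an archived
# generation of the source object (Object Versioning enabled on the bucket).
copy_file_archived_generation(
    bucket_name="my-versioned-bucket",
    blob_name="report.csv",
    destination_bucket_name="my-versioned-bucket",
    destination_blob_name="report-restored.csv",
    generation=1579287380533984,
)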
Ruby
For more information, see the Cloud Storage Ruby API reference documentation.
To authenticate to Cloud Storage, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
def copy_file_archived_generation source_bucket_name:, source_file_name:, generation:,
                                  destination_bucket_name:, destination_file_name:
  # The ID of the bucket the original object is in
  # source_bucket_name = "source-bucket-name"

  # The ID of the GCS object to copy
  # source_file_name = "source-file-name"

  # The generation of your GCS object to copy
  # generation = 1579287380533984

  # The ID of the bucket to copy the object to
  # destination_bucket_name = "destination-bucket-name"

  # The ID of the new GCS object
  # destination_file_name = "destination-file-name"

  require "google/cloud/storage"

  storage = Google::Cloud::Storage.new

  source_bucket      = storage.bucket source_bucket_name, skip_lookup: true
  source_file        = source_bucket.file source_file_name
  destination_bucket = storage.bucket destination_bucket_name, skip_lookup: true

  destination_file = source_file.copy destination_bucket, destination_file_name,
                                      generation: generation

  puts "Generation #{generation} of the file #{source_file.name} in bucket #{source_bucket.name} copied to file " \
       "#{destination_file.name} in bucket #{destination_bucket.name}"
end
What's next
To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.