@@ -16,14 +16,15 @@

package com.example.storage.object;

// [START storage_start_appendable_object_upload]
// [START storage_create_and_write_appendable_object_upload]

import com.google.cloud.storage.BlobAppendableUpload;
import com.google.cloud.storage.BlobAppendableUpload.AppendableUploadWriteableByteChannel;
import com.google.cloud.storage.BlobAppendableUploadConfig;
import com.google.cloud.storage.BlobAppendableUploadConfig.CloseAction;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.FlushPolicy;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.io.ByteStreams;
@@ -33,8 +34,8 @@
import java.nio.file.Paths;
import java.util.Locale;

public class StartAppendableObjectUpload {
public static void startAppendableObjectUpload(
public class CreateAndWriteAppendableObject {
public static void createAndWriteAppendableObject(
String bucketName, String objectName, String filePath) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";
@@ -49,12 +50,18 @@ public static void startAppendableObjectUpload(
BlobId blobId = BlobId.of(bucketName, objectName);
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();

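// Flush buffered data to the service in chunks of at most 64 * 1000 bytes.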
int flushSize = 64 * 1000;
FlushPolicy.MaxFlushSizeFlushPolicy flushPolicy = FlushPolicy.maxFlushSize(flushSize);
BlobAppendableUploadConfig config =
BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
BlobAppendableUploadConfig.of()
.withCloseAction(CloseAction.FINALIZE_WHEN_CLOSING)
.withFlushPolicy(flushPolicy);
BlobAppendableUpload uploadSession = storage.blobAppendableUpload(blobInfo, config);
try (AppendableUploadWriteableByteChannel channel = uploadSession.open();
ReadableByteChannel readableByteChannel = FileChannel.open(Paths.get(filePath))) {
ByteStreams.copy(readableByteChannel, channel);
// Because the channel is opened in a try-with-resources block, channel.close()
// is called implicitly when the block exits, which finalizes the object.
} catch (IOException ex) {
throw new IOException("Failed to upload to object " + blobId.toGsUtilUri(), ex);
}
@@ -67,4 +74,4 @@ public static void startAppendableObjectUpload(
}
}

// [END storage_start_appendable_object_upload]
// [END storage_create_and_write_appendable_object_upload]
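
A minimal, hypothetical driver for the sample above. The Example class name and all argument values are placeholders, not part of the change; it simply calls the method defined in this file.

package com.example.storage.object;

// Hypothetical driver; the bucket name, object name, and file path are placeholders.
public class CreateAndWriteAppendableObjectExample {
  public static void main(String[] args) throws Exception {
    CreateAndWriteAppendableObject.createAndWriteAppendableObject(
        "your-unique-bucket-name", "your-object-name", "path/to/your/file");
  }
}
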
@@ -0,0 +1,87 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_open_multiple_objects_ranged_read]

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.RangeSpec;
import com.google.cloud.storage.ReadAsFutureBytes;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class OpenMultipleObjectsRangedRead {
public static void multipleObjectsSingleRangedRead(
String bucketName, List<String> objectNames, long startOffset, int length) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";

// The IDs of your GCS objects to read
// List<String> objectNames = Arrays.asList("object-1", "object-2", "object-3");

RangeSpec singleRange = RangeSpec.of(startOffset, length);
ReadAsFutureBytes rangeConfig =
ReadProjectionConfigs.asFutureBytes().withRangeSpec(singleRange);

try (Storage storage = StorageOptions.grpc().build().getService()) {
List<ApiFuture<byte[]>> futuresToWaitOn = new ArrayList<>();

System.out.printf(
"Initiating single ranged read [%d, %d] on %d objects...%n",
startOffset, startOffset + length - 1, objectNames.size());

for (String objectName : objectNames) {
BlobId blobId = BlobId.of(bucketName, objectName);
ApiFuture<BlobReadSession> futureReadSession = storage.blobReadSession(blobId);

ApiFuture<byte[]> readAndCloseFuture =
ApiFutures.transformAsync(
futureReadSession,
(BlobReadSession session) -> {
ApiFuture<byte[]> readFuture = session.readAs(rangeConfig);

readFuture.addListener(
() -> {
try {
session.close();
} catch (IOException e) {
System.err.println(
"WARN: Background error while closing session: " + e.getMessage());
}
},
MoreExecutors.directExecutor());
return readFuture;
},
MoreExecutors.directExecutor());

futuresToWaitOn.add(readAndCloseFuture);
}
ApiFutures.allAsList(futuresToWaitOn).get(30, TimeUnit.SECONDS);

System.out.println("All concurrent single-ranged read operations are complete.");
}
}
}
// [END storage_open_multiple_objects_ranged_read]
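
A hypothetical invocation of this sample, reading the first 64 bytes of each of three objects; the driver class name, bucket, and object names are placeholders.

package com.example.storage.object;

import java.util.Arrays;

// Hypothetical driver; bucket and object names are placeholders.
public class OpenMultipleObjectsRangedReadExample {
  public static void main(String[] args) throws Exception {
    OpenMultipleObjectsRangedRead.multipleObjectsSingleRangedRead(
        "your-unique-bucket-name",
        Arrays.asList("object-1", "object-2", "object-3"),
        0L,
        64);
  }
}
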
@@ -16,7 +16,7 @@

package com.example.storage.object;

// [START storage_read_appendable_object_multiple_ranges]
// [START storage_open_object_multiple_ranged_read]

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
@@ -30,10 +30,28 @@
import java.util.List;
import java.util.concurrent.TimeUnit;

public class AppendableObjectMultipleRangedRead {
public static void appendableObjectMultipleRangedRead(
public class OpenObjectMultipleRangedRead {
public static void openObjectMultipleRangedRead(
String bucketName, String objectName, long offset1, int length1, long offset2, int length2)
throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";

// The ID of your GCS object
// String objectName = "your-object-name";

// The beginning of range 1
// long offset1 = 0;

// The maximum number of bytes to read in range 1
// int length1 = 16;

// The beginning of range 2
// long offset2 = 16;

// The maximum number of bytes to read in range 2
// int length2 = 32;

try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);
@@ -62,4 +80,4 @@ public static void appendableObjectMultipleRangedRead(
}
}

// [END storage_read_appendable_object_multiple_ranges]
// [END storage_open_object_multiple_ranged_read]
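
The body of this sample is collapsed in the diff. As a rough illustration only, not the file's actual content, the two ranges could be read from a single BlobReadSession using the same RangeSpec / ReadProjectionConfigs calls that appear in OpenMultipleObjectsRangedRead above; the class name, bucket, object, and range values below are placeholders.

package com.example.storage.object;

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.RangeSpec;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

// Illustrative sketch, not the collapsed body of OpenObjectMultipleRangedRead.
public class TwoRangeReadSketch {
  public static void main(String[] args) throws Exception {
    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of("your-unique-bucket-name", "your-object-name");
      // Open one read session and issue both ranged reads against it.
      try (BlobReadSession session = storage.blobReadSession(blobId).get(30, TimeUnit.SECONDS)) {
        ApiFuture<byte[]> range1 =
            session.readAs(
                ReadProjectionConfigs.asFutureBytes().withRangeSpec(RangeSpec.of(0, 16)));
        ApiFuture<byte[]> range2 =
            session.readAs(
                ReadProjectionConfigs.asFutureBytes().withRangeSpec(RangeSpec.of(16, 32)));
        List<byte[]> results =
            ApiFutures.allAsList(Arrays.asList(range1, range2)).get(30, TimeUnit.SECONDS);
        System.out.printf(
            "Read %d bytes and %d bytes%n", results.get(0).length, results.get(1).length);
      }
    }
  }
}
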
@@ -16,7 +16,7 @@

package com.example.storage.object;

// [START storage_read_appendable_object_full]
// [START storage_open_object_read_full_object]

import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
@@ -30,9 +30,15 @@
import java.util.Locale;
import java.util.concurrent.TimeUnit;

public class AppendableObjectReadFullObject {
public static void appendableObjectReadFullObject(String bucketName, String objectName)
public class OpenObjectReadFullObject {
public static void openObjectReadFullObject(String bucketName, String objectName)
throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";

// The ID of your GCS object to read
// String objectName = "your-object-name";

try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);
@@ -60,4 +66,4 @@ public static void appendableObjectReadFullObject(String bucketName, String obje
}
}
}
// [END storage_read_appendable_object_full]
// [END storage_open_object_read_full_object]
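
Another hypothetical driver, with placeholder class and argument names, that reads back the entire object using the sample above.

package com.example.storage.object;

// Hypothetical driver; bucket and object names are placeholders.
public class OpenObjectReadFullObjectExample {
  public static void main(String[] args) throws Exception {
    OpenObjectReadFullObject.openObjectReadFullObject(
        "your-unique-bucket-name", "your-object-name");
  }
}
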
@@ -16,7 +16,7 @@

package com.example.storage.object;

// [START storage_read_appendable_object_single_range]
// [START storage_open_object_single_ranged_read]

import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
@@ -27,9 +27,20 @@
import com.google.cloud.storage.StorageOptions;
import java.util.concurrent.TimeUnit;

public class AppendableObjectSingleRangedRead {
public static void appendableObjectSingleRangedRead(
public class OpenObjectSingleRangedRead {
public static void openObjectSingleRangedRead(
String bucketName, String objectName, long offset, int length) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";

// The ID of your GCS object
// String objectName = "your-object-name";

// The beginning of the range
// long offset = 0;

// The maximum number of bytes to read from the object
// int length = 64;

try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
@@ -55,4 +66,4 @@ public static void appendableObjectSingleRangedRead(
}
}
}
// [END storage_read_appendable_object_single_range]
// [END storage_open_object_single_ranged_read]
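
A hypothetical invocation matching the commented defaults above (offset 0, up to 64 bytes); the class name and values are placeholders.

package com.example.storage.object;

// Hypothetical driver; bucket and object names are placeholders.
public class OpenObjectSingleRangedReadExample {
  public static void main(String[] args) throws Exception {
    // Read up to 64 bytes starting at offset 0.
    OpenObjectSingleRangedRead.openObjectSingleRangedRead(
        "your-unique-bucket-name", "your-object-name", 0L, 64);
  }
}
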
@@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,7 +16,7 @@

package com.example.storage.object;

// [START storage_resume_appendable_object_upload]
// [START storage_pause_and_resume_appendable_object_upload]

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobAppendableUpload;
@@ -26,66 +26,82 @@
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageChannelUtils;
import com.google.cloud.storage.StorageOptions;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Locale;

public class ResumeAppendableObjectUpload {
public static void resumeAppendableObjectUpload(
public class PauseAndResumeAppendableObjectUpload {
public static void pauseAndResumeAppendableObjectUpload(
String bucketName, String objectName, String filePath) throws Exception {
// The ID of your GCS bucket
// String bucketName = "your-unique-bucket-name";

// The ID of your GCS unfinalized appendable object
// The ID of your GCS object
// String objectName = "your-object-name";

// The path to the file to upload
// String filePath = "path/to/your/file";

try (Storage storage = StorageOptions.grpc().build().getService()) {
BlobId blobId = BlobId.of(bucketName, objectName);
Blob existingBlob = storage.get(blobId);
BlobInfo blobInfoForTakeover = BlobInfo.newBuilder(existingBlob.getBlobId()).build();
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();

// --- Step 1: Initial string write (PAUSE) ---
// The default close action is CLOSE_WITHOUT_FINALIZING, so closing the channel
// pauses the upload without finalizing the object.
BlobAppendableUploadConfig initialConfig = BlobAppendableUploadConfig.of();
BlobAppendableUpload initialUploadSession =
storage.blobAppendableUpload(blobInfo, initialConfig);

try (AppendableUploadWriteableByteChannel channel = initialUploadSession.open()) {
String initialData = "Initial data segment.\n";
ByteBuffer buffer = ByteBuffer.wrap(initialData.getBytes(StandardCharsets.UTF_8));
long totalBytesWritten = StorageChannelUtils.blockingEmptyTo(buffer, channel);
channel.flush();

System.out.printf(
Locale.US, "Wrote %d bytes (initial string) in first segment.\n", totalBytesWritten);
} catch (IOException ex) {
throw new IOException("Failed initial upload to object " + blobId.toGsUtilUri(), ex);
}

Blob existingBlob = storage.get(blobId);
long currentObjectSize = existingBlob.getSize();
System.out.printf(
Locale.US,
"Resuming upload for %s. Currently uploaded size: %d bytes\n",
blobId.toGsUtilUri(),
"Initial upload paused. Currently uploaded size: %d bytes\n",
currentObjectSize);

BlobAppendableUploadConfig config =
BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
// --- Step 2: Resume upload with file content and finalize ---
// Use FINALIZE_WHEN_CLOSING to ensure the object is finalized on channel closure.
BlobAppendableUploadConfig resumeConfig =
BlobAppendableUploadConfig.of().withCloseAction(CloseAction.FINALIZE_WHEN_CLOSING);
BlobAppendableUpload resumeUploadSession =
storage.blobAppendableUpload(blobInfoForTakeover, config);
storage.blobAppendableUpload(existingBlob.toBuilder().build(), resumeConfig);

try (FileChannel fileChannel = FileChannel.open(Paths.get(filePath));
AppendableUploadWriteableByteChannel channel = resumeUploadSession.open()) {
long bytesToAppend = fileChannel.size();
System.out.printf(
Locale.US,
"Appending the entire file (%d bytes) after the initial string.\n",
bytesToAppend);

if (fileChannel.size() < currentObjectSize) {
throw new IOException(
"Local file is smaller than the already uploaded data. File size: "
+ fileChannel.size()
+ ", Uploaded size: "
+ currentObjectSize);
} else if (fileChannel.size() == currentObjectSize) {
System.out.println("No more data to upload.");
} else {
fileChannel.position(currentObjectSize);
System.out.printf(
Locale.US, "Appending %d bytes\n", fileChannel.size() - currentObjectSize);
ByteStreams.copy(fileChannel, channel);
}
ByteStreams.copy(fileChannel, channel);
}

BlobInfo result = storage.get(blobId);
System.out.printf(
Locale.US,
"Object %s successfully resumed. Total size: %d\n",
"\nObject %s successfully resumed and finalized. Total size: %d bytes\n",
result.getBlobId().toGsUtilUriWithGeneration(),
result.getSize());
}
}
}
// [END storage_resume_appendable_object_upload]
// [END storage_pause_and_resume_appendable_object_upload]
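
A hypothetical driver for the pause-and-resume sample; the class name is an assumption, the bucket and object names are placeholders, and the local file path must point to an existing file.

package com.example.storage.object;

// Hypothetical driver; all argument values are placeholders.
public class PauseAndResumeAppendableObjectUploadExample {
  public static void main(String[] args) throws Exception {
    PauseAndResumeAppendableObjectUpload.pauseAndResumeAppendableObjectUpload(
        "your-unique-bucket-name", "your-object-name", "path/to/your/file");
  }
}
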