@@ -0,0 +1,65 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_read_appendable_object_multiple_ranges]

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.RangeSpec;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class AppendableObjectMultipleRangedRead {
  public static void appendableObjectMultipleRangedRead(
      String bucketName, String objectName, long offset1, int length1, long offset2, int length2)
      throws Exception {
    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of(bucketName, objectName);
      ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);
      RangeSpec rangeSpec1 = RangeSpec.of(offset1, length1);
      RangeSpec rangeSpec2 = RangeSpec.of(offset2, length2);

      try (BlobReadSession blobReadSession = futureBlobReadSession.get(10, TimeUnit.SECONDS)) {
        ApiFuture<byte[]> future1 =
            blobReadSession.readAs(ReadProjectionConfigs.asFutureBytes().withRangeSpec(rangeSpec1));
        ApiFuture<byte[]> future2 =
            blobReadSession.readAs(ReadProjectionConfigs.asFutureBytes().withRangeSpec(rangeSpec2));

        List<byte[]> allBytes = ApiFutures.allAsList(ImmutableList.of(future1, future2)).get();

        byte[] bytes1 = allBytes.get(0);
        byte[] bytes2 = allBytes.get(1);

        System.out.println(
            "Successfully read "
                + bytes1.length
                + " bytes from range 1 and "
                + bytes2.length
                + " bytes from range 2.");
      }
    }
  }
}

// [END storage_read_appendable_object_multiple_ranges]
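For reference, a minimal invocation sketch for the sample above; the class name, bucket, object, offsets, and lengths below are hypothetical placeholders and not part of this change.

package com.example.storage.object;

public class AppendableObjectMultipleRangedReadExample {
  public static void main(String[] args) throws Exception {
    // Read 1024 bytes starting at offset 0 and 1024 bytes starting at offset 4096.
    AppendableObjectMultipleRangedRead.appendableObjectMultipleRangedRead(
        "my-bucket", "my-appendable-object", 0L, 1024, 4096L, 1024);
  }
}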
@@ -0,0 +1,63 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_read_appendable_object_full]

import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.ReadAsChannel;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.nio.ByteBuffer;
import java.nio.channels.ScatteringByteChannel;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

public class AppendableObjectReadFullObject {
  public static void appendableObjectReadFullObject(String bucketName, String objectName)
      throws Exception {
    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of(bucketName, objectName);
      ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);

      try (BlobReadSession blobReadSession = futureBlobReadSession.get(10, TimeUnit.SECONDS)) {

        ReadAsChannel readAsChannelConfig = ReadProjectionConfigs.asChannel();
        try (ScatteringByteChannel channel = blobReadSession.readAs(readAsChannelConfig)) {
          long totalBytesRead = 0;
          ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
          int bytesRead;

          while ((bytesRead = channel.read(buffer)) != -1) {
            totalBytesRead += bytesRead;
            buffer.clear();
          }

          System.out.printf(
              Locale.US,
              "Successfully read a total of %d bytes from object %s%n",
              totalBytesRead,
              blobId.toGsUtilUri());
        }
      }
    }
  }
}
// [END storage_read_appendable_object_full]
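The loop in the sample above only counts bytes and discards them. Below is a hedged variation, assuming the same BlobReadSession channel API shown in the sample, that accumulates a small object's contents in memory; the class name and placeholder values are hypothetical.

package com.example.storage.object;

import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ScatteringByteChannel;
import java.util.concurrent.TimeUnit;

public class AppendableObjectReadFullObjectToMemory {
  public static byte[] readFullObjectToMemory(String bucketName, String objectName)
      throws Exception {
    try (Storage storage = StorageOptions.grpc().build().getService()) {
      ApiFuture<BlobReadSession> futureSession =
          storage.blobReadSession(BlobId.of(bucketName, objectName));
      try (BlobReadSession session = futureSession.get(10, TimeUnit.SECONDS);
          ScatteringByteChannel channel = session.readAs(ReadProjectionConfigs.asChannel())) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
        // Drain the channel, copying each filled buffer into the in-memory stream.
        while (channel.read(buffer) != -1) {
          buffer.flip();
          out.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining());
          buffer.clear();
        }
        return out.toByteArray();
      }
    }
  }
}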
@@ -0,0 +1,58 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_read_appendable_object_single_range]

import com.google.api.core.ApiFuture;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobReadSession;
import com.google.cloud.storage.RangeSpec;
import com.google.cloud.storage.ReadProjectionConfigs;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.util.concurrent.TimeUnit;

public class AppendableObjectSingleRangedRead {
  public static void appendableObjectSingleRangedRead(
      String bucketName, String objectName, long offset, int length) throws Exception {

    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of(bucketName, objectName);
      ApiFuture<BlobReadSession> futureBlobReadSession = storage.blobReadSession(blobId);

      try (BlobReadSession blobReadSession = futureBlobReadSession.get(10, TimeUnit.SECONDS)) {
        // Define the range of bytes to read.
        RangeSpec rangeSpec = RangeSpec.of(offset, length);
        ApiFuture<byte[]> future =
            blobReadSession.readAs(ReadProjectionConfigs.asFutureBytes().withRangeSpec(rangeSpec));

        // Wait for the read to complete.
        byte[] bytes = future.get();

        System.out.println(
            "Successfully read "
                + bytes.length
                + " bytes from object "
                + objectName
                + " in bucket "
                + bucketName);
      }
    }
  }
}
// [END storage_read_appendable_object_single_range]
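A minimal invocation sketch for the single-range sample; the class name, bucket, object, offset, and length are placeholders.

package com.example.storage.object;

public class AppendableObjectSingleRangedReadExample {
  public static void main(String[] args) throws Exception {
    // Read 1024 bytes starting at offset 0 of a hypothetical appendable object.
    AppendableObjectSingleRangedRead.appendableObjectSingleRangedRead(
        "my-bucket", "my-appendable-object", 0L, 1024);
  }
}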
@@ -0,0 +1,64 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_finalize_appendable_object_upload]

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobAppendableUpload;
import com.google.cloud.storage.BlobAppendableUploadConfig;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

public class FinalizeAppendableObjectUpload {
  public static void finalizeAppendableObjectUpload(String bucketName, String objectName)
      throws Exception {
    // The ID of your GCS bucket
    // String bucketName = "your-unique-bucket-name";

    // The ID of your GCS unfinalized appendable object
    // String objectName = "your-object-name";

    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of(bucketName, objectName);
      Blob existingBlob = storage.get(blobId);

      if (existingBlob == null) {
        System.out.println("Object " + objectName + " not found in bucket " + bucketName);
        return;
      }

      BlobInfo blobInfoForTakeover = BlobInfo.newBuilder(existingBlob.getBlobId()).build();
      BlobAppendableUpload finalizingSession =
          storage.blobAppendableUpload(
              blobInfoForTakeover,
              BlobAppendableUploadConfig.of()
                  .withCloseAction(BlobAppendableUploadConfig.CloseAction.FINALIZE_WHEN_CLOSING));

      try (BlobAppendableUpload.AppendableUploadWriteableByteChannel channel =
          finalizingSession.open()) {
        channel.finalizeAndClose();
      }

      System.out.println(
          "Successfully finalized object " + objectName + " in bucket " + bucketName);
    }
  }
}
// [END storage_finalize_appendable_object_upload]
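A minimal invocation sketch for the finalize sample; the class name, bucket, and object names are placeholders.

package com.example.storage.object;

public class FinalizeAppendableObjectUploadExample {
  public static void main(String[] args) throws Exception {
    // Take over a hypothetical unfinalized appendable object and finalize it.
    FinalizeAppendableObjectUpload.finalizeAppendableObjectUpload(
        "my-bucket", "my-appendable-object");
  }
}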
@@ -0,0 +1,91 @@
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.storage.object;

// [START storage_resume_appendable_object_upload]

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobAppendableUpload;
import com.google.cloud.storage.BlobAppendableUpload.AppendableUploadWriteableByteChannel;
import com.google.cloud.storage.BlobAppendableUploadConfig;
import com.google.cloud.storage.BlobAppendableUploadConfig.CloseAction;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.util.Locale;

public class ResumeAppendableObjectUpload {
  public static void resumeAppendableObjectUpload(
      String bucketName, String objectName, String filePath) throws Exception {
    // The ID of your GCS bucket
    // String bucketName = "your-unique-bucket-name";

    // The ID of your GCS unfinalized appendable object
    // String objectName = "your-object-name";

    // The path to the file to upload
    // String filePath = "path/to/your/file";

    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobId blobId = BlobId.of(bucketName, objectName);
      Blob existingBlob = storage.get(blobId);
      BlobInfo blobInfoForTakeover = BlobInfo.newBuilder(existingBlob.getBlobId()).build();

      long currentObjectSize = existingBlob.getSize();
      System.out.printf(
          Locale.US,
          "Resuming upload for %s. Currently uploaded size: %d bytes\n",
          blobId.toGsUtilUri(),
          currentObjectSize);

      BlobAppendableUploadConfig config =
          BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
      BlobAppendableUpload resumeUploadSession =
          storage.blobAppendableUpload(blobInfoForTakeover, config);
      try (FileChannel fileChannel = FileChannel.open(Paths.get(filePath));
          AppendableUploadWriteableByteChannel channel = resumeUploadSession.open()) {

        if (fileChannel.size() < currentObjectSize) {
          throw new IOException(
              "Local file is smaller than the already uploaded data. File size: "
                  + fileChannel.size()
                  + ", Uploaded size: "
                  + currentObjectSize);
        } else if (fileChannel.size() == currentObjectSize) {
          System.out.println("No more data to upload.");
        } else {
          fileChannel.position(currentObjectSize);
          System.out.printf(
              Locale.US, "Appending %d bytes\n", fileChannel.size() - currentObjectSize);
          ByteStreams.copy(fileChannel, channel);
        }
      }
      BlobInfo result = storage.get(blobId);
      System.out.printf(
          Locale.US,
          "Object %s successfully resumed. Total size: %d\n",
          result.getBlobId().toGsUtilUriWithGeneration(),
          result.getSize());
    }
  }
}
// [END storage_resume_appendable_object_upload]
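For context, a hedged sketch of how an unfinalized appendable object, like the one resumed and finalized above, might be created in the first place, using only the BlobAppendableUpload surface already shown in these samples; the class name, bucket, object, and payload are hypothetical placeholders.

package com.example.storage.object;

import com.google.cloud.storage.BlobAppendableUpload;
import com.google.cloud.storage.BlobAppendableUploadConfig;
import com.google.cloud.storage.BlobAppendableUploadConfig.CloseAction;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class CreateAppendableObjectExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical bucket, object, and payload; replace with your own values.
    try (Storage storage = StorageOptions.grpc().build().getService()) {
      BlobInfo blobInfo =
          BlobInfo.newBuilder(BlobId.of("my-bucket", "my-appendable-object")).build();
      BlobAppendableUploadConfig config =
          BlobAppendableUploadConfig.of().withCloseAction(CloseAction.CLOSE_WITHOUT_FINALIZING);
      BlobAppendableUpload upload = storage.blobAppendableUpload(blobInfo, config);
      try (BlobAppendableUpload.AppendableUploadWriteableByteChannel channel = upload.open()) {
        // Write an initial chunk; closing without finalizing leaves the object
        // available for later takeover by the resume and finalize samples above.
        channel.write(ByteBuffer.wrap("hello appendable".getBytes(StandardCharsets.UTF_8)));
      }
    }
  }
}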