From d69eea7019e3f7e241879768f8e7e0a638a74f31 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Fri, 9 Jan 2026 16:37:59 +0000
Subject: [PATCH 1/2] chore: migrate cleanup commands from gsutil to gcloud storage

---
 people-and-planet-ai/conftest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/people-and-planet-ai/conftest.py b/people-and-planet-ai/conftest.py
index fed54feb9b9..1bf49d26a00 100644
--- a/people-and-planet-ai/conftest.py
+++ b/people-and-planet-ai/conftest.py
@@ -84,7 +84,7 @@ def bucket_name(test_name: str, location: str, unique_id: str) -> Iterable[str]:
     # Try to remove all files before deleting the bucket.
     # Deleting a bucket with too many files results in an error.
     try:
-        run_cmd("gsutil", "-m", "rm", "-rf", f"gs://{bucket_name}/*")
+        run_cmd("gcloud", "storage", "rm", "--recursive", f"gs://{bucket_name}/**")
     except RuntimeError:
         # If no files were found and it fails, ignore the error.
         pass

From 4aa4cb42a523bad7dc8fdda9fce5b9f121ed6 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Fri, 9 Jan 2026 16:53:15 +0000
Subject: [PATCH 2/2] chore: migrate bucket creation command from gsutil to gcloud storage

---
 pubsublite/spark-connector/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pubsublite/spark-connector/README.md b/pubsublite/spark-connector/README.md
index c133fd66f64..cdef86589f7 100644
--- a/pubsublite/spark-connector/README.md
+++ b/pubsublite/spark-connector/README.md
@@ -54,7 +54,7 @@ Get the connector's uber jar from this [public Cloud Storage location]. Alternat
 
 ```bash
 export BUCKET_ID=your-gcs-bucket-id
-gsutil mb gs://$BUCKET_ID
+gcloud storage buckets create gs://$BUCKET_ID
 ```
 
 ## Python setup