diff --git a/people-and-planet-ai/conftest.py b/people-and-planet-ai/conftest.py
index fed54feb9b9..1bf49d26a00 100644
--- a/people-and-planet-ai/conftest.py
+++ b/people-and-planet-ai/conftest.py
@@ -84,7 +84,7 @@ def bucket_name(test_name: str, location: str, unique_id: str) -> Iterable[str]:
     # Try to remove all files before deleting the bucket.
     # Deleting a bucket with too many files results in an error.
     try:
-        run_cmd("gsutil", "-m", "rm", "-rf", f"gs://{bucket_name}/*")
+        run_cmd("gcloud", "storage", "rm", "--recursive", f"gs://{bucket_name}/**")
     except RuntimeError:
         # If no files were found and it fails, ignore the error.
         pass
diff --git a/pubsublite/spark-connector/README.md b/pubsublite/spark-connector/README.md
index c133fd66f64..cdef86589f7 100644
--- a/pubsublite/spark-connector/README.md
+++ b/pubsublite/spark-connector/README.md
@@ -54,7 +54,7 @@ Get the connector's uber jar from this [public Cloud Storage location]. Alternat
 
    ```bash
    export BUCKET_ID=your-gcs-bucket-id
-   gsutil mb gs://$BUCKET_ID
+   gcloud storage buckets create gs://$BUCKET_ID
    ```
 
 ## Python setup
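
For context, the conftest.py hunk above shells out to `gcloud storage rm` through a `run_cmd` helper that is defined elsewhere in that file and not shown in this diff. Below is a minimal, hypothetical sketch of how such a helper and the updated cleanup call might fit together, assuming a subprocess-based `run_cmd` that raises `RuntimeError` on a non-zero exit code; the actual helper in the repository may differ.

```python
# Hypothetical sketch only: the real run_cmd lives elsewhere in
# people-and-planet-ai/conftest.py and is not part of this hunk.
import subprocess


def run_cmd(*cmd: str) -> None:
    """Run a command and raise RuntimeError if it exits with a non-zero code."""
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"{' '.join(cmd)} failed:\n{result.stderr}")


# Example: empty a test bucket before deleting it, mirroring the diff above.
bucket_name = "my-test-bucket"  # placeholder bucket name
try:
    run_cmd("gcloud", "storage", "rm", "--recursive", f"gs://{bucket_name}/**")
except RuntimeError:
    # No objects matched the wildcard; safe to ignore.
    pass
```

Note that `gcloud storage` uses the `**` wildcard to match objects recursively, which is why the new command targets `gs://{bucket_name}/**` rather than the `/*` pattern used with `gsutil -m rm -rf`.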