8 changes: 4 additions & 4 deletions docs/guide/container_component.md
@@ -70,8 +70,8 @@ grep_component = tfx.dsl.components.create_container_component(
parameters={
'pattern': str,
},
-# The component code uses gsutil to upload the data to Google Cloud Storage, so the
-# container image needs to have gsutil installed and configured.
+# The component code uses gcloud storage to upload the data to Google Cloud Storage, so the
+# container image needs to have gcloud storage installed and configured.
image='google/cloud-sdk:278.0.0',
command=[
'sh', '-exc',
@@ -83,13 +83,13 @@ grep_component = tfx.dsl.components.create_container_component(
filtered_text_path=$(mktemp)

# Getting data into the container
-gsutil cp "$text_uri" "$text_path"
+gcloud storage cp "$text_uri" "$text_path"

# Running the main code
grep "$pattern" "$text_path" >"$filtered_text_path"

# Getting data out of the container
-gsutil cp "$filtered_text_path" "$filtered_text_uri"
+gcloud storage cp "$filtered_text_path" "$filtered_text_uri"
''',
'--pattern', tfx.dsl.placeholders.InputValuePlaceholder('pattern'),
'--text', tfx.dsl.placeholders.InputUriPlaceholder('text'),
6 changes: 3 additions & 3 deletions docs/tutorials/tfx/gcp/vertex_pipelines_simple.ipynb
@@ -387,7 +387,7 @@
},
"outputs": [],
"source": [
-"!gsutil cp gs://download.tensorflow.org/data/palmer_penguins/penguins_processed.csv {DATA_ROOT}/"
+"!gcloud storage cp gs://download.tensorflow.org/data/palmer_penguins/penguins_processed.csv {DATA_ROOT}/"
]
},
{
@@ -407,7 +407,7 @@
},
"outputs": [],
"source": [
-"!gsutil cat {DATA_ROOT}/penguins_processed.csv | head"
+"!gcloud storage cat {DATA_ROOT}/penguins_processed.csv | head"
]
},
{
@@ -607,7 +607,7 @@
},
"outputs": [],
"source": [
-"!gsutil cp {_trainer_module_file} {MODULE_ROOT}/"
+"!gcloud storage cp {_trainer_module_file} {MODULE_ROOT}/"
]
},
{
2 changes: 1 addition & 1 deletion docs/tutorials/tfx/stub_template.md
@@ -143,7 +143,7 @@ tfx run create --pipeline-name $pipeline_name --endpoint=$endpoint \

Use command `fg` to access the port-forwarding in the background then ctrl-C to
terminate. You can delete the directory with recorded pipeline outputs using
-`gsutil -m rm -R $output_dir`.
+`gcloud storage rm --recursive $output_dir`.

To clean up all Google Cloud resources used in this project, you can
[delete the Google Cloud project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#shutting_down_projects)
2 changes: 1 addition & 1 deletion docs/tutorials/tfx/template.ipynb
@@ -408,7 +408,7 @@
},
"outputs": [],
"source": [
-"!gsutil cp data/data.csv gs://{GOOGLE_CLOUD_PROJECT}-kubeflowpipelines-default/tfx-template/data/taxi/data.csv"
+"!gcloud storage cp data/data.csv gs://{GOOGLE_CLOUD_PROJECT}-kubeflowpipelines-default/tfx-template/data/taxi/data.csv"
]
},
{
2 changes: 1 addition & 1 deletion tfx/components/util/udf_utils_test.py
@@ -148,7 +148,7 @@ def testAddModuleDependencyAndPackage(self):
self.assertEqual(
dependency,
os.path.join(
-temp_pipeline_root, '_wheels', 'tfx_user_code_MyComponent-0.0+'
+temp_pipeline_root, '_wheels', 'tfx_user_code_mycomponent-0.0+'
'1c9b861db85cc54c56a56cbf64f77c1b9d1ded487d60a97d082ead6b250ee62c'
'-py3-none-any.whl'))

@@ -20,10 +20,10 @@ echo Running cloud serving...
# Dir for model exported for serving, e.g., gs://<bucket>/serving_model/chicago_taxi_pipeline_kubeflow
CLOUD_MODEL_DIR=$1

-gsutil ls $CLOUD_MODEL_DIR
+gcloud storage ls $CLOUD_MODEL_DIR

# Pick out the directory containing the last trained model.
-MODEL_BINARIES=$(gsutil ls $CLOUD_MODEL_DIR \
+MODEL_BINARIES=$(gcloud storage ls $CLOUD_MODEL_DIR \
| sort | grep '\/[0-9]*\/$' | tail -n1)

echo latest model: $MODEL_BINARIES
@@ -41,7 +41,7 @@
wget "$0" -O "$output_data_path" || curl "$0" > "$output_data_path"

# Getting data out of the container
-gsutil cp "$output_data_path" "$output_data_uri"
+gcloud storage cp "$output_data_path" "$output_data_uri"
''',
placeholders.InputValuePlaceholder('url'),
placeholders.OutputUriPlaceholder('data'),
@@ -74,13 +74,13 @@
filtered_text_path=$(mktemp)

# Getting data into the container
-gsutil cp "$text_uri" "$text_path"
+gcloud storage cp "$text_uri" "$text_path"

# Running the main code
grep "$pattern" "$text_path" >"$filtered_text_path"

# Getting data out of the container
-gsutil cp "$filtered_text_path" "$filtered_text_uri"
+gcloud storage cp "$filtered_text_path" "$filtered_text_uri"
''',
placeholders.InputValuePlaceholder('pattern'),
placeholders.InputUriPlaceholder('text'),
@@ -105,7 +105,7 @@
text_path=$(mktemp)

# Getting data into the container
-gsutil cp "$text_uri" "$text_path"
+gcloud storage cp "$text_uri" "$text_path"

# Running the main code
cat "$text_path"
2 changes: 1 addition & 1 deletion tfx/examples/custom_components/slack/README.md
@@ -114,7 +114,7 @@ pipeline in Google Cloud Platform for production.
First prepare a gcs bucket for the pipeline run root:

```bash
-gsutil mb -p ${PROJECT_ID} gs://${BUCKET_NAME}
+gcloud storage buckets create gs://${BUCKET_NAME} --project=${PROJECT_ID}
```

Let's copy the dataset CSV to the GCS where TFX ExampleGen will ingest it
6 changes: 3 additions & 3 deletions tfx/examples/penguin/experimental/README.md
@@ -67,8 +67,8 @@ pipeline python file. Output can be found at `[BUCKET]/tfx`.

<pre class="devsite-terminal devsite-click-to-copy">
vi ~/penguin/experimental/penguin_pipeline_sklearn_gcp.py
-gsutil -m cp -r ~/penguin/data/* gs://[BUCKET]/penguin/data/
-gsutil -m cp ~/penguin/experimental/\*.py gs://[BUCKET]/penguin/experimental/
+gcloud storage cp --recursive ~/penguin/data/* gs://[BUCKET]/penguin/data/
+gcloud storage cp ~/penguin/experimental/\*.py gs://[BUCKET]/penguin/experimental/

tfx pipeline create \
--engine kubeflow \
@@ -77,7 +77,7 @@ tfx pipeline create \
</pre>

Note that
-`gsutil -m cp ~/penguin/experimental/*.py gs://[BUCKET]/penguin/experimental`
+`gcloud storage cp ~/penguin/experimental/*.py gs://[BUCKET]/penguin/experimental`
will need to be run every time updates are made to the GCP example.
Additionally, subsequent pipeline deployments should use `tfx pipeline update`
instead of `tfx pipeline create`.
2 changes: 1 addition & 1 deletion tfx/orchestration/kubeflow/test_utils.py
@@ -197,7 +197,7 @@ class HelloWorldComponent(BaseComponent):
args=[
'echo "hello ' +
ph.exec_property('word') +
-'" | gsutil cp - ' +
+'" | gcloud storage cp - ' +
ph.output('greeting')[0].uri
])

@@ -46,7 +46,7 @@ def _tasks_for_pipeline_with_artifact_value_passing():
echo "Hello $message" >"$output_data_path"

# Getting data out of the container
-gsutil cp -r "$output_data_path" "$output_data_uri"
+gcloud storage cp --recursive "$output_data_path" "$output_data_uri"
""",
placeholders.InputValuePlaceholder('message'),
placeholders.OutputUriPlaceholder('data'),
@@ -41,7 +41,7 @@
wget "$0" -O "$output_data_path" || curl "$0" > "$output_data_path"

# Getting data out of the container
-gsutil cp "$output_data_path" "$output_data_uri"
+gcloud storage cp "$output_data_path" "$output_data_uri"
''',
ph.exec_property('url'),
ph.output('data')[0].uri,
@@ -74,13 +74,13 @@
filtered_text_path=$(mktemp)

# Getting data into the container
-gsutil cp "$text_uri" "$text_path"
+gcloud storage cp "$text_uri" "$text_path"

# Running the main code
grep "$pattern" "$text_path" >"$filtered_text_path"

# Getting data out of the container
-gsutil cp "$filtered_text_path" "$filtered_text_uri"
+gcloud storage cp "$filtered_text_path" "$filtered_text_uri"
''',
ph.exec_property('pattern'),
ph.input('text')[0].uri,
@@ -105,7 +105,7 @@
text_path=$(mktemp)

# Getting data into the container
-gsutil cp "$text_uri" "$text_path"
+gcloud storage cp "$text_uri" "$text_path"

# Running the main code
cat "$text_path"