Skip to content

Commit 2dbb786

Browse files
refactor: rename tensorflow-example to tfserving-example, since we are actually deploying in KServe using the TF Serving runtime
1 parent 0044bb0 commit 2dbb786

File tree

3 files changed

+11
-10
lines changed

3 files changed

+11
-10
lines changed

deployment/kserve/scripts/deploy_tensorflow_model.bash

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -17,24 +17,24 @@ minio_api_node_port=$(kubectl get svc minio-service --namespace minio -o jsonpat
1717
minio_external_api_url="http://$minikube_ip:$minio_api_node_port"
1818

1919
# Deploy model and make it accessible
20-
service_name="tensorflow-example"
20+
service_name="tfserving-example"
2121
host_model_path="../../../models/tensorflow/1"
22-
s3_model_dir="s3://test-bucket/tensorflow-example/1"
23-
storage_uri="\"s3://test-bucket/tensorflow-example\""
22+
s3_model_dir="s3://test-bucket/tfserving-example/1"
23+
storage_uri="\"s3://test-bucket/tfserving-example\""
2424
aws s3 cp --recursive "$host_model_path" $s3_model_dir --profile $minio_aws_profile_name --endpoint-url=$minio_external_api_url &> /dev/null
2525
sed -e "s/{{ inference_service_resource_name }}/$service_name/g" \
2626
-e "s/{{ service_account_resource_name }}/$service_account_resource_name/g" \
2727
-e "s|{{ s3_model_root_path }}|$storage_uri|g" \
28-
$templates_path/tensorflow.yaml \
29-
> $tmp_kubeconfigs_path/tensorflow-isvc.yaml
30-
deploy_service "default" "$tmp_kubeconfigs_path/tensorflow-isvc.yaml" "$service_name"
28+
$templates_path/tfserving.yaml \
29+
> $tmp_kubeconfigs_path/tfserving-isvc.yaml
30+
deploy_service "default" "$tmp_kubeconfigs_path/tfserving-isvc.yaml" "$service_name"
3131
wait_for_inference_service 300 5 "$service_name" "default"
3232

3333
# Test predictions
3434
istio_node_port=$(kubectl get svc istio-ingressgateway --namespace istio-system -o jsonpath='{.spec.ports[?(@.name=="http2")].nodePort}')
3535
istio_base_url="http://$minikube_ip:$istio_node_port"
36-
model_name="tensorflow-example"
37-
service_name="tensorflow-example"
36+
model_name="tfserving-example"
37+
service_name="tfserving-example"
3838
namespace="default"
3939
url="${istio_base_url}/v1/models/${model_name}:predict"
4040
service_hostname=$(kubectl get inferenceservice ${service_name} --namespace "$namespace" -o jsonpath='{.status.url}' | cut -d "/" -f 3)

deployment/kserve/templates/tensorflow.yaml renamed to deployment/kserve/templates/tfserving.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,5 @@ spec:
1010
model:
1111
modelFormat:
1212
name: tensorflow
13+
runtime: kserve-tensorflow-serving
1314
storageUri: {{ s3_model_root_path }}

deployment/tensorflow/README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,6 @@ If using `minikube` to deploy the tensorflow model using TF Serving behind KServe, y
3434
```bash
3535
python test_predictions.py \
3636
-r 360950 \
37-
-u "http://$(minikube ip):$(kubectl get svc istio-ingressgateway --namespace istio-system -o jsonpath='{.spec.ports[?(@.name=="http2")].nodePort}')/v1/models/tensorflow-example:predict" \
38-
-H "Host=$(kubectl get inferenceservice tensorflow-example --namespace default -o jsonpath='{.status.url}' | cut -d "/" -f 3)"
37+
-u "http://$(minikube ip):$(kubectl get svc istio-ingressgateway --namespace istio-system -o jsonpath='{.spec.ports[?(@.name=="http2")].nodePort}')/v1/models/tfserving-example:predict" \
38+
-H "Host=$(kubectl get inferenceservice tfserving-example --namespace default -o jsonpath='{.status.url}' | cut -d "/" -f 3)"
3939
```

0 commit comments

Comments (0)