From 639fc00e74408d2721b6fb20adf08d158a6b7f0b Mon Sep 17 00:00:00 2001
From: Diankun An <54262787+piaolaidelangman@users.noreply.github.com>
Date: Wed, 9 Mar 2022 14:38:24 +0800
Subject: [PATCH] [PPML] Update Version in doc to 2.0.0 (#4206)

* [PPML] Update PPML Image Version to 2.0.0

* Update PPML Image and Version to 2.0.0
---
 ppml/kms-client/README.md | 2 +-
 .../python/docker-graphene/README.md | 10 +++++-----
 .../python/docker-graphene/deploy-local-spark-sgx.sh | 2 +-
 .../python/docker-graphene/environment.sh | 2 +-
 .../start-scripts/start-spark-local-bigdl-sgx.sh | 8 ++++----
 ppml/trusted-big-data-ml/scala/docker-occlum/README.md | 2 +-
 .../scala/docker-occlum/build-docker-image.sh | 2 +-
 .../scala/docker-occlum/kubernetes/README.md | 4 ++--
 .../scala/docker-occlum/kubernetes/executor.yaml | 2 +-
 .../scala/docker-occlum/kubernetes/run_spark_gbt.sh | 2 +-
 .../scala/docker-occlum/kubernetes/run_spark_lr.sh | 2 +-
 .../scala/docker-occlum/kubernetes/run_spark_pi.sh | 2 +-
 .../scala/docker-occlum/kubernetes/run_spark_sql.sh | 2 +-
 .../docker-occlum/kubernetes/run_spark_xgboost.sh | 4 ++--
 .../scala/docker-occlum/run_spark_on_occlum_glibc.sh | 2 +-
 .../scala/docker-occlum/start-spark-local.sh | 2 +-
 .../docker-graphene/start-local-cluster-serving.sh | 2 +-
 ppml/trusted-realtime-ml/scala/docker-occlum/README.md | 2 +-
 .../scala/docker-occlum/build-docker-image.sh | 2 +-
 .../scala/docker-occlum/environment.sh | 2 +-
 .../scala/docker-occlum/start-local-cluster-serving.sh | 2 +-
 21 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/ppml/kms-client/README.md b/ppml/kms-client/README.md
index a9ae427da62..620e8b66867 100644
--- a/ppml/kms-client/README.md
+++ b/ppml/kms-client/README.md
@@ -69,7 +69,7 @@ Pay attention to the variables and set them to correct values according to your
 ```bash
 export INPUT_DIR_PATH=YOUR_DATA_FILE_DIRECTORY_PATH # For example, multiple CSV files are in this directory
 export KMS_SERVER_IP=YOUR_KMS_SERVER_IP # IP address of node where the previous KMS server is deployed
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:0.14.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.0.0
 
 sudo docker pull $DOCKER_IMAGE
 
diff --git a/ppml/trusted-big-data-ml/python/docker-graphene/README.md b/ppml/trusted-big-data-ml/python/docker-graphene/README.md
index e45e6125a16..e87505fe5ee 100644
--- a/ppml/trusted-big-data-ml/python/docker-graphene/README.md
+++ b/ppml/trusted-big-data-ml/python/docker-graphene/README.md
@@ -229,7 +229,7 @@ Run the example with SGX spark local mode with the following command in the term
 
 ```bash
 SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-  '/ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+  '/ppml/trusted-big-data-ml/work/bigdl-2.0.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
   -Xmx2g \
   org.apache.spark.deploy.SparkSubmit \
   --master 'local[4]' \
@@ -239,13 +239,13 @@ SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
   --conf spark.rpc.message.maxSize=190 \
   --conf spark.network.timeout=10000000 \
   --conf spark.executor.heartbeatInterval=10000000 \
-  --properties-file /ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/conf/spark-bigdl.conf \
-  --py-files /ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/python/bigdl-orca-spark_3.1.2-0.14.0-SNAPSHOT-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/python/bigdl-dllib-spark_3.1.2-0.14.0-SNAPSHOT-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+  --properties-file /ppml/trusted-big-data-ml/work/bigdl-2.0.0/conf/spark-bigdl.conf \
+  --py-files /ppml/trusted-big-data-ml/work/bigdl-2.0.0/python/bigdl-orca-spark_3.1.2-2.0.0-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-2.0.0/python/bigdl-dllib-spark_3.1.2-2.0.0-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-2.0.0/examples/dllib/lenet/lenet.py \
   --driver-cores 2 \
   --total-executor-cores 2 \
   --executor-cores 2 \
   --executor-memory 8g \
-  /ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+  /ppml/trusted-big-data-ml/work/bigdl-2.0.0/examples/dllib/lenet/lenet.py \
   --dataPath /ppml/trusted-big-data-ml/work/data/mnist \
   --maxEpoch 2" 2>&1 | tee test-bigdl-lenet-sgx.log
 ```
@@ -581,7 +581,7 @@ export KEYS_PATH=/YOUR_DIR/keys
 export SECURE_PASSWORD_PATH=/YOUR_DIR/password
 export KUBECONFIG_PATH=/YOUR_DIR/kuberconfig
 export LOCAL_IP=$LOCAL_IP
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:0.14.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.0.0
 sudo docker run -itd \
   --privileged \
   --net=host \
diff --git a/ppml/trusted-big-data-ml/python/docker-graphene/deploy-local-spark-sgx.sh b/ppml/trusted-big-data-ml/python/docker-graphene/deploy-local-spark-sgx.sh
index 4235cc529fc..e75b92966ea 100755
--- a/ppml/trusted-big-data-ml/python/docker-graphene/deploy-local-spark-sgx.sh
+++ b/ppml/trusted-big-data-ml/python/docker-graphene/deploy-local-spark-sgx.sh
@@ -4,7 +4,7 @@ export ENCLAVE_KEY_PATH=YOUR_LOCAL_ENCLAVE_KEY_PATH
 export DATA_PATH=YOUR_LOCAL_DATA_PATH
 export KEYS_PATH=YOUR_LOCAL_KEYS_PATH
 export LOCAL_IP=YOUR_LOCAL_IP
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:0.14.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.0.0
 
 sudo docker pull $DOCKER_IMAGE
 
diff --git a/ppml/trusted-big-data-ml/python/docker-graphene/environment.sh b/ppml/trusted-big-data-ml/python/docker-graphene/environment.sh
index ee9a11697ce..9414b1199fa 100755
--- a/ppml/trusted-big-data-ml/python/docker-graphene/environment.sh
+++ b/ppml/trusted-big-data-ml/python/docker-graphene/environment.sh
@@ -2,7 +2,7 @@
 export MASTER=YOUR_MASTER_IP
 export WORKERS=(YOUR_WORKER_IP_1 YOUR_WORKER_IP_2 YOUR_WORKER_IP_3)
 
-export TRUSTED_BIGDATA_ML_DOCKER=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:0.14.0-SNAPSHOT
+export TRUSTED_BIGDATA_ML_DOCKER=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.0.0
 
 export SOURCE_ENCLAVE_KEY_PATH=YOUR_LOCAL_ENCLAVE_KEY_PATH
 export SOURCE_KEYS_PATH=YOUR_LOCAL_KEYS_PATH
diff --git a/ppml/trusted-big-data-ml/python/docker-graphene/start-scripts/start-spark-local-bigdl-sgx.sh b/ppml/trusted-big-data-ml/python/docker-graphene/start-scripts/start-spark-local-bigdl-sgx.sh
index 37f45808ec7..4cc9e4bc6e5 100644
--- a/ppml/trusted-big-data-ml/python/docker-graphene/start-scripts/start-spark-local-bigdl-sgx.sh
+++ b/ppml/trusted-big-data-ml/python/docker-graphene/start-scripts/start-spark-local-bigdl-sgx.sh
@@ -2,7 +2,7 @@
 
 cd /ppml/trusted-big-data-ml
 SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-  '/ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+  '/ppml/trusted-big-data-ml/work/bigdl-2.0.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
   -Xmx2g \
   org.apache.spark.deploy.SparkSubmit \
   --master 'local[4]' \
@@ -12,13 +12,13 @@ SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
   --conf spark.rpc.message.maxSize=190 \
   --conf spark.network.timeout=10000000 \
   --conf spark.executor.heartbeatInterval=10000000 \
-  --properties-file /ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/conf/spark-bigdl.conf \
-  --py-files local://${BIGDL_HOME}/python/bigdl-orca-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,local://${BIGDL_HOME}/python/bigdl-dllib-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+  --properties-file /ppml/trusted-big-data-ml/work/bigdl-2.0.0/conf/spark-bigdl.conf \
+  --py-files local://${BIGDL_HOME}/python/bigdl-orca-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,local://${BIGDL_HOME}/python/bigdl-dllib-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-2.0.0/examples/dllib/lenet/lenet.py \
   --driver-cores 2 \
   --total-executor-cores 2 \
   --executor-cores 2 \
   --executor-memory 8g \
-  /ppml/trusted-big-data-ml/work/bigdl-0.14.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+  /ppml/trusted-big-data-ml/work/bigdl-2.0.0/examples/dllib/lenet/lenet.py \
   --dataPath /ppml/trusted-big-data-ml/work/data/mnist \
   --maxEpoch 2" 2>&1 | tee test-bigdl-lenet-sgx.log && \
 cat test-bigdl-lenet-sgx.log | egrep -a "Accuracy"
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
index 454f8e09f13..2c590608439 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
@@ -6,7 +6,7 @@
 Pull image from dockerhub.
 
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0
 ```
 
 Also, you can build image with `build-docker-image.sh`. Configure environment variables in `Dockerfile` and `build-docker-image.sh`.
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/build-docker-image.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/build-docker-image.sh
index aedb392fb74..ba579587bc7 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/build-docker-image.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/build-docker-image.sh
@@ -12,4 +12,4 @@ sudo docker build \
   --build-arg HTTPS_PROXY_PORT=$HTTPS_PROXY_PORT \
   --build-arg no_proxy=x.x.x.x \
   --build-arg SPARK_JAR_REPO_URL=$SPARK_JAR_REPO_URL \
-  -t intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT -f ./Dockerfile .
+  -t intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 -f ./Dockerfile .
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
index 65c7e03e04b..4c5523d319b 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
@@ -3,12 +3,12 @@
 ## Prerequisite
 
 * Check Kubernetes env or Install Kubernetes from [wiki](https://kubernetes.io/zh/docs/setup/production-environment)
-* Prepare image `intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT`
+* Prepare image `intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0`
 
 1. Pull image from Dockerhub
 
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0
 ```
 
 If Dockerhub is not accessable, we can build docker image with Dockerfile and modify the path in the build-docker-image.sh firstly.
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/executor.yaml b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/executor.yaml
index 20c1e14f7fa..00eedf8c7ac 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/executor.yaml
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/executor.yaml
@@ -6,7 +6,7 @@ metadata:
 spec:
   containers:
   - name: spark-example
-    image: intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT
+    image: intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0
     imagePullPolicy: Never
     volumeMounts:
     - name: sgx-enclave
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
index 1a502c26c44..b5a551dbeac 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
   --class org.apache.spark.examples.ml.GBTExample \
   --conf spark.executor.instances=1 \
   --conf spark.rpc.netty.dispatcher.numThreads=32 \
-  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
   --conf spark.kubernetes.executor.podNamePrefix="sparkgbt" \
   --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
index 9ad11d26a85..9464f820d1b 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
   --class org.apache.spark.examples.ml.LogisticRegressionExample \
   --conf spark.executor.instances=1 \
   --conf spark.rpc.netty.dispatcher.numThreads=32 \
-  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
   --conf spark.kubernetes.executor.podNamePrefix="sparklr" \
   --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
index f85281952bc..3a3060f3e23 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
   --class org.apache.spark.examples.SparkPi \
   --conf spark.executor.instances=1 \
   --conf spark.rpc.netty.dispatcher.numThreads=32 \
-  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
   --conf spark.kubernetes.executor.deleteOnTermination=false \
   --conf spark.kubernetes.driver.podTemplateFile=./executor.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
index 05b01c03ef2..88199237d8d 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
   --class org.apache.spark.examples.sql.SparkSQLExample \
   --conf spark.executor.instances=1 \
   --conf spark.rpc.netty.dispatcher.numThreads=32 \
-  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
   --conf spark.kubernetes.executor.podNamePrefix="sparksql" \
   --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
index 5728eacf5f0..9ebb88b36b5 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
   --class com.intel.analytics.bigdl.dllib.examples.nnframes.xgboost.xgbClassifierTrainingExample \
   --conf spark.executor.instances=1 \
   --conf spark.rpc.netty.dispatcher.numThreads=32 \
-  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
   --conf spark.kubernetes.executor.deleteOnTermination=false \
   --conf spark.kubernetes.driver.podTemplateFile=./executor.yaml \
@@ -19,5 +19,5 @@ ${SPARK_HOME}/bin/spark-submit \
   --total-executor-cores 2 \
   --driver-cores 1 \
   --driver-memory 2g \
-  local:/bin/jars/bigdl-dllib-spark_3.1.2-0.14.0-SNAPSHOT.jar \
+  local:/bin/jars/bigdl-dllib-spark_3.1.2-2.0.0.jar \
   /host/data/iris.data 2 100 /host/data/xgboost_model_to_be_saved
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/run_spark_on_occlum_glibc.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/run_spark_on_occlum_glibc.sh
index 508c58e188b..611f8a1a0e3 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/run_spark_on_occlum_glibc.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/run_spark_on_occlum_glibc.sh
@@ -235,7 +235,7 @@ run_spark_xgboost() {
   --executor-cores 2 \
   --executor-memory 9G \
   --driver-memory 2G \
-  /bin/jars/bigdl-dllib-spark_3.1.2-0.14.0-SNAPSHOT.jar \
+  /bin/jars/bigdl-dllib-spark_3.1.2-2.0.0.jar \
   /host/data /host/data/model 2 100 2
 }
 
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
index 5e605262860..d591522d0c2 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
@@ -12,5 +12,5 @@ sudo docker run -it \
   -v data:/opt/occlum_spark/data \
   -e LOCAL_IP=$LOCAL_IP \
   -e SGX_MEM_SIZE=24GB \
-  intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:0.14.0-SNAPSHOT \
+  intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.0.0 \
   bash /opt/run_spark_on_occlum_glibc.sh $1 && tail -f /dev/null
diff --git a/ppml/trusted-realtime-ml/scala/docker-graphene/start-local-cluster-serving.sh b/ppml/trusted-realtime-ml/scala/docker-graphene/start-local-cluster-serving.sh
index e7cee0a2dc1..ac6efff178c 100755
--- a/ppml/trusted-realtime-ml/scala/docker-graphene/start-local-cluster-serving.sh
+++ b/ppml/trusted-realtime-ml/scala/docker-graphene/start-local-cluster-serving.sh
@@ -21,7 +21,7 @@ sudo docker run -itd \
   --name=trusted-cluster-serving-local \
   -e LOCAL_IP=$LOCAL_IP \
   -e CORE_NUM=30 \
-  intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-graphene:0.14.0-SNAPSHOT \
+  intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-graphene:2.0.0 \
   bash -c "cd /ppml/trusted-realtime-ml/ && ./start-all.sh && tail -f /dev/null"
 
 sudo docker exec -i trusted-cluster-serving-local bash -c "mkdir /dev/sgx && \
diff --git a/ppml/trusted-realtime-ml/scala/docker-occlum/README.md b/ppml/trusted-realtime-ml/scala/docker-occlum/README.md
index 4d15521ec42..e8d696661fc 100755
--- a/ppml/trusted-realtime-ml/scala/docker-occlum/README.md
+++ b/ppml/trusted-realtime-ml/scala/docker-occlum/README.md
@@ -7,7 +7,7 @@ Please pay attention to IP and path etc.. They should be changed to your own ser
 Pull image from Dockerhub
 
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:0.14.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:2.0.0
 ```
 
 If Dockerhub is not accessable, we can build docker image with Dockerfile. Please modify the paths in `build-docker-image.sh`, then build docker image by running this command:
diff --git a/ppml/trusted-realtime-ml/scala/docker-occlum/build-docker-image.sh b/ppml/trusted-realtime-ml/scala/docker-occlum/build-docker-image.sh
index 66f925fc0eb..6caee5734db 100755
--- a/ppml/trusted-realtime-ml/scala/docker-occlum/build-docker-image.sh
+++ b/ppml/trusted-realtime-ml/scala/docker-occlum/build-docker-image.sh
@@ -11,4 +11,4 @@ sudo docker build \
   --build-arg HTTP_PROXY_PORT=$HTTP_PROXY_PORT \
   --build-arg HTTPS_PROXY_HOST=$HTTPS_PROXY_HOST \
   --build-arg HTTPS_PROXY_PORT=$HTTPS_PROXY_PORT \
-  -t intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:0.14.0-SNAPSHOT -f ./Dockerfile .
+  -t intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:2.0.0 -f ./Dockerfile .
diff --git a/ppml/trusted-realtime-ml/scala/docker-occlum/environment.sh b/ppml/trusted-realtime-ml/scala/docker-occlum/environment.sh
index 82e76eb6ddc..3b3cd17590e 100755
--- a/ppml/trusted-realtime-ml/scala/docker-occlum/environment.sh
+++ b/ppml/trusted-realtime-ml/scala/docker-occlum/environment.sh
@@ -3,7 +3,7 @@
 export MASTER=YOUR_MASTER_IP
 export WORKERS=(YOUR_WORKER_IP_1 YOUR_WORKER_IP_2 YOUR_WORKER_IP_3)
 
-export TRUSTED_CLUSTER_SERVING_DOCKER=intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:0.14.0-SNAPSHOT
+export TRUSTED_CLUSTER_SERVING_DOCKER=intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:2.0.0
 
 export SOURCE_KEYS_PATH=YOUR_LOCAL_KEYS_PATH
 export SOURCE_SECURE_PASSWORD_PATH=YOUR_LOCAL_SECURE_PASSWORD_PATH
diff --git a/ppml/trusted-realtime-ml/scala/docker-occlum/start-local-cluster-serving.sh b/ppml/trusted-realtime-ml/scala/docker-occlum/start-local-cluster-serving.sh
index 63155979158..ad821084e95 100755
--- a/ppml/trusted-realtime-ml/scala/docker-occlum/start-local-cluster-serving.sh
+++ b/ppml/trusted-realtime-ml/scala/docker-occlum/start-local-cluster-serving.sh
@@ -15,5 +15,5 @@ sudo docker run -itd \
   --name=trusted-cluster-serving-local \
   -e LOCAL_IP=$LOCAL_IP \
   -e CORE_NUM=4 \
-  intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:0.14.0-SNAPSHOT \
+  intelanalytics/bigdl-ppml-trusted-realtime-ml-scala-occlum:2.0.0 \
   bash -c "export PATH=/opt/occlum/build/bin:$PATH && cd /opt/ && ./start-all.sh && tail -f /dev/null"
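Since the patch is a mechanical tag bump from `0.14.0-SNAPSHOT` to `2.0.0`, a quick grep after applying it confirms that nothing under `ppml/` was missed. A minimal sketch, assuming the patch is saved locally as `4206.patch` (hypothetical file name) and the commands are run from the repository root:

```bash
# Preview the files the patch touches, then apply it.
git apply --stat 4206.patch
git apply 4206.patch

# Any remaining 0.14.0-SNAPSHOT reference under ppml/ means a file was missed.
if grep -rn "0.14.0-SNAPSHOT" ppml/; then
    echo "stale version references remain"
else
    echo "all references updated to 2.0.0"
fi
```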