From 53c61b65f2f8513abea6c6b4ff2f40e9341726ff Mon Sep 17 00:00:00 2001
From: kev
Date: Fri, 10 Dec 2021 18:23:45 +0800
Subject: [PATCH] add tensorflow/serving

---
 README.md                             |  2 +
 nifi/README.md                        |  3 +-
 nifi/docker-compose-cluster.yml       | 53 ++++++++++++++-------------
 nifi/docker-compose.yml               |  6 ++-
 tensorflow/serving/README.md          | 16 ++++++++
 tensorflow/serving/docker-compose.yml | 12 ++++++
 6 files changed, 64 insertions(+), 28 deletions(-)
 create mode 100644 tensorflow/serving/README.md
 create mode 100644 tensorflow/serving/docker-compose.yml

diff --git a/README.md b/README.md
index 1da1e99..0a17575 100644
--- a/README.md
+++ b/README.md
@@ -428,6 +428,8 @@ A collection of delicious docker recipes.
 - [x] jira
 - [x] strapi/strapi
 - [x] amancevice/superset
+- [x] tensorflow
+  - [x] serving
 - [x] tile38/tile38
 - [x] traefik
 - [x] trinodb/trino
diff --git a/nifi/README.md b/nifi/README.md
index d9bc534..fc00026 100644
--- a/nifi/README.md
+++ b/nifi/README.md
@@ -6,7 +6,6 @@ and distribute data.
 
 ![](https://nifi.apache.org/assets/images/flow-th.png)
 
-
 ## up and running
 
 ```bash
@@ -18,4 +17,6 @@ $ curl http://127.0.0.1:8080/nifi/
 $ curl http://127.0.0.1:18080/nifi-registry/
 ```
 
+> :warning: upgrade seems very hard
+
 [1]: https://nifi.apache.org
diff --git a/nifi/docker-compose-cluster.yml b/nifi/docker-compose-cluster.yml
index 499dddb..1683654 100644
--- a/nifi/docker-compose-cluster.yml
+++ b/nifi/docker-compose-cluster.yml
@@ -1,25 +1,28 @@
-nifi:
-  image: apache/nifi:1.11.4
-  ports:
-    - "8080:8080"
-    - "8082:8082"
-    - "10000:10000"
-  environment:
-    - NIFI_WEB_HTTP_PORT=8080
-    - NIFI_CLUSTER_IS_NODE=true
-    - NIFI_CLUSTER_NODE_PROTOCOL_PORT=8082
-    - NIFI_REMOTE_INPUT_SOCKET_PORT=10000
-    - NIFI_ZK_CONNECT_STRING=zookeeper1:2181,zookeeper2:2181,zookeeper3:2181
-    - NIFI_ELECTION_MAX_WAIT=1 min
-    - NIFI_ELECTION_MAX_CANDIDATES=2
-    - NIFI_JVM_HEAP_INIT=512m
-    - NIFI_JVM_HEAP_MAX=1g
-  extra_hosts:
-    - zookeeper1:10.0.0.21
-    - zookeeper2:10.0.0.22
-    - zookeeper3:10.0.0.23
-    # nifi1:10.0.0.11
-    - nifi2:10.0.0.12
-    - nifi3:10.0.0.13
-  hostname: nifi1
-  restart: unless-stopped
+version: "3.8"
+services:
+  nifi:
+    image: apache/nifi:1.15.0
+    ports:
+      - "8080:8080"
+      - "8082:8082"
+      - "10000:10000"
+    environment:
+      - NIFI_WEB_HTTP_PORT=8080
+      - NIFI_WEB_PROXY_CONTEXT_PATH=// # FIX BUG
+      - NIFI_CLUSTER_IS_NODE=true
+      - NIFI_CLUSTER_NODE_PROTOCOL_PORT=8082
+      - NIFI_REMOTE_INPUT_SOCKET_PORT=10000
+      - NIFI_ZK_CONNECT_STRING=zookeeper1:2181,zookeeper2:2181,zookeeper3:2181
+      - NIFI_ELECTION_MAX_WAIT=1 min
+      - NIFI_ELECTION_MAX_CANDIDATES=2
+      - NIFI_JVM_HEAP_INIT=512m
+      - NIFI_JVM_HEAP_MAX=1g
+    extra_hosts:
+      - zookeeper1:10.0.0.21
+      - zookeeper2:10.0.0.22
+      - zookeeper3:10.0.0.23
+      # nifi1:10.0.0.11
+      - nifi2:10.0.0.12
+      - nifi3:10.0.0.13
+    hostname: nifi1
+    restart: unless-stopped
diff --git a/nifi/docker-compose.yml b/nifi/docker-compose.yml
index b2c2a5e..2aefd1d 100644
--- a/nifi/docker-compose.yml
+++ b/nifi/docker-compose.yml
@@ -3,7 +3,7 @@
 services:
 
   nifi:
-    image: apache/nifi:1.12.1
+    image: apache/nifi:1.15.0
     ports:
       - "8080:8080"
     volumes:
@@ -16,17 +16,19 @@
       - ./data/nifi/logs:/opt/nifi/nifi-current/logs
     environment:
       - NIFI_WEB_HTTP_PORT=8080
+      - NIFI_WEB_PROXY_CONTEXT_PATH=// # FIX BUG
       - NIFI_JVM_HEAP_INIT=8g
       - NIFI_JVM_HEAP_MAX=16g
     hostname: nifi
     restart: unless-stopped
 
   registry:
-    image: apache/nifi-registry:0.8.0
+    image: apache/nifi-registry:1.15.0
     ports:
       - "18080:18080"
     volumes:
       - ./data/registry/database:/opt/nifi-registry/nifi-registry-current/database
       - ./data/registry/flow_storage:/opt/nifi-registry/nifi-registry-current/flow_storage
       - ./data/registry/logs:/opt/nifi-registry/nifi-registry-current/logs
+    hostname: registry
     restart: unless-stopped
diff --git a/tensorflow/serving/README.md b/tensorflow/serving/README.md
new file mode 100644
index 0000000..d25f9d3
--- /dev/null
+++ b/tensorflow/serving/README.md
@@ -0,0 +1,16 @@
+tensorflow/serving
+==================
+
+[TensorFlow Serving][1] is a flexible, high-performance serving system for machine learning models, designed for production environments.
+
+## up and running
+
+```bash
+$ mkdir -p data
+$ curl -sSL https://github.com/tensorflow/serving/archive/refs/heads/master.tar.gz | tar xz -C data serving-master/tensorflow_serving/servables/tensorflow/testdata --strip 5
+$ docker-compose up -d
+$ curl -X POST http://localhost:8501/v1/models/half_plus_two:predict -d '{"instances": [1.0, 2.0, 5.0]}'
+{ "predictions": [2.5, 3.0, 4.5] }
+```
+
+[1]: https://github.com/tensorflow/serving
diff --git a/tensorflow/serving/docker-compose.yml b/tensorflow/serving/docker-compose.yml
new file mode 100644
index 0000000..953351f
--- /dev/null
+++ b/tensorflow/serving/docker-compose.yml
@@ -0,0 +1,12 @@
+version: "3.8"
+services:
+  serving:
+    image: tensorflow/serving
+    ports:
+      - "8501:8501"
+    volumes:
+      - ./data/saved_model_half_plus_two_cpu:/models/half_plus_two
+    environment:
+      - MODEL_BASE_PATH=/models
+      - MODEL_NAME=half_plus_two
+    restart: unless-stopped