diff --git a/README.md b/README.md index a24a034..cfea29e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # ONNX Runtime Server -[![ONNX Runtime](https://img.shields.io/github/v/release/microsoft/onnxruntime?filter=v1.20.1&label=ONNX%20Runtime)](https://github.com/microsoft/onnxruntime) +[![ONNX Runtime](https://img.shields.io/github/v/release/microsoft/onnxruntime?filter=v1.20.2&label=ONNX%20Runtime)](https://github.com/microsoft/onnxruntime) [![CMake on Linux](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-linux.yml/badge.svg)](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-linux.yml) [![CMake on MacOS](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-macos.yml/badge.svg)](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-macos.yml) [![CMake on Windows](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-windows.yml/badge.svg)](https://github.com/kibae/onnxruntime-server/actions/workflows/cmake-windows.yml) @@ -169,14 +169,14 @@ sudo cmake --install build --prefix /usr/local/onnxruntime-server - Docker hub: [kibaes/onnxruntime-server](https://hub.docker.com/r/kibaes/onnxruntime-server) - [ - `1.20.1b-linux-cuda12`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cuda12.dockerfile) + `1.20.2-linux-cuda12`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cuda12.dockerfile) amd64(CUDA 12.x, cuDNN 9.x) - [ - `1.20.1b-linux-cpu`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cpu.dockerfile) + `1.20.2-linux-cpu`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cpu.dockerfile) amd64, arm64 ```shell -DOCKER_IMAGE=kibae/onnxruntime-server:1.20.1b-linux-cuda12 # or kibae/onnxruntime-server:1.20.1b-linux-cpu +DOCKER_IMAGE=kibaes/onnxruntime-server:1.20.2-linux-cuda12 # or kibaes/onnxruntime-server:1.20.2-linux-cpu docker pull ${DOCKER_IMAGE} diff 
--git a/deploy/build-docker/README.md b/deploy/build-docker/README.md index 91448bb..124a312 100644 --- a/deploy/build-docker/README.md +++ b/deploy/build-docker/README.md @@ -2,7 +2,7 @@ ## x64 with CUDA -- [ONNX Runtime Binary](https://github.com/microsoft/onnxruntime/releases) v1.20.1b(latest) requires CUDA 11/12, cudnn 8/9. +- [ONNX Runtime Binary](https://github.com/microsoft/onnxruntime/releases) v1.20.2(latest) requires CUDA 11/12, cudnn 8/9. ``` $ ldd libonnxruntime_providers_cuda.so linux-vdso.so.1 (0x00007fffa4bf8000) diff --git a/deploy/build-docker/VERSION b/deploy/build-docker/VERSION index d0dd02a..6029717 100644 --- a/deploy/build-docker/VERSION +++ b/deploy/build-docker/VERSION @@ -1,2 +1,2 @@ -export VERSION=1.20.1b +export VERSION=1.20.2 export IMAGE_PREFIX=kibaes/onnxruntime-server diff --git a/deploy/build-docker/docker-compose.yaml b/deploy/build-docker/docker-compose.yaml index f1d1c25..c24ddd4 100644 --- a/deploy/build-docker/docker-compose.yaml +++ b/deploy/build-docker/docker-compose.yaml @@ -5,7 +5,7 @@ services: onnxruntime_server_simple: # After the docker container is up, you can use the REST API (http://localhost:8080). # API documentation will be available at http://localhost:8080/api-docs. - image: kibaes/onnxruntime-server:1.20.1b-linux-cuda12 + image: kibaes/onnxruntime-server:1.20.2-linux-cuda12 ports: - "8080:80" # for http backend volumes: @@ -29,7 +29,7 @@ services: onnxruntime_server_advanced: # After the docker container is up, you can use the REST API (http://localhost, https://localhost). # API documentation will be available at http://localhost/api-docs. 
- image: kibaes/onnxruntime-server:1.20.1b-linux-cuda12 + image: kibaes/onnxruntime-server:1.20.2-linux-cuda12 ports: - "80:80" # for http backend - "443:443" # for https backend diff --git a/docs/docker.md b/docs/docker.md index 54ead8a..b1fe209 100644 --- a/docs/docker.md +++ b/docs/docker.md @@ -5,8 +5,8 @@ # Supported tags and respective Dockerfile links -- [`1.20.1b-linux-cuda12`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cuda12.dockerfile) amd64(CUDA 12.x, cuDNN 9.x) -- [`1.20.1b-linux-cpu`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cpu.dockerfile) amd64, arm64 +- [`1.20.2-linux-cuda12`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cuda12.dockerfile) amd64(CUDA 12.x, cuDNN 9.x) +- [`1.20.2-linux-cpu`](https://github.com/kibae/onnxruntime-server/blob/main/deploy/build-docker/linux-cpu.dockerfile) amd64, arm64 # How to use this image @@ -28,7 +28,7 @@ - API documentation will be available at http://localhost/api-docs. ```shell -DOCKER_IMAGE=kibae/onnxruntime-server:1.20.1b-linux-cuda12 # or kibae/onnxruntime-server:1.20.1b-linux-cpu +DOCKER_IMAGE=kibaes/onnxruntime-server:1.20.2-linux-cuda12 # or kibaes/onnxruntime-server:1.20.2-linux-cpu docker pull ${DOCKER_IMAGE} @@ -69,7 +69,7 @@ services: onnxruntime_server_simple: # After the docker container is up, you can use the REST API (http://localhost:8080). # API documentation will be available at http://localhost:8080/api-docs. - image: kibaes/onnxruntime-server:1.20.1b-linux-cuda12 + image: kibaes/onnxruntime-server:1.20.2-linux-cuda12 ports: - "8080:80" # for http backend volumes: @@ -101,7 +101,7 @@ services: onnxruntime_server_advanced: # After the docker container is up, you can use the REST API (http://localhost, https://localhost). - # API documentation wl be available at http://localhost/api-docs. + # API documentation will be available at http://localhost/api-docs. 
- image: kibaes/onnxruntime-server:1.20.1b-linux-cuda12 + image: kibaes/onnxruntime-server:1.20.2-linux-cuda12 ports: - "80:80" # for http backend - "443:443" # for https backend diff --git a/docs/swagger/openapi.yaml b/docs/swagger/openapi.yaml index eec3490..a3f25d3 100644 --- a/docs/swagger/openapi.yaml +++ b/docs/swagger/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: title: ONNX Runtime Server description: |- - version: 1.20.1b + version: 1.20.2 externalDocs: description: ONNX Runtime Server url: https://github.com/kibae/onnxruntime-server diff --git a/src/test/test_lib_version.cpp b/src/test/test_lib_version.cpp index 2f8b4a1..cf29a53 100644 --- a/src/test/test_lib_version.cpp +++ b/src/test/test_lib_version.cpp @@ -6,5 +6,5 @@ #include "./test_common.hpp" TEST(test_lib_version, LibVersion) { - EXPECT_EQ(onnxruntime_server::onnx::version(), "1.20.1"); + EXPECT_EQ(onnxruntime_server::onnx::version(), "1.20.2"); }