
Commit

Make wasi-nn backends separate shared libraries (bytecodealliance…#3509)

- All files under *core/iwasm/libraries/wasi-nn* are compiled as shared libraries
- *wasi-nn.c* is shared between backends
- Every backend is built as its own separate shared library
- If the wasi-nn feature is enabled, iwasm depends on the shared library libiwasm.so
  instead of linking the static library libvmlib.a
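
With this change, an iwasm built with wasi-nn enabled should show a runtime dependency on libiwasm.so. A quick sanity check on Linux (the build directory below is illustrative; adjust it to your own tree):

```bash
# Confirm iwasm links the shared runtime instead of embedding libvmlib.a.
ldd product-mini/platforms/linux/build/iwasm | grep libiwasm
```
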
lum1n0us authored Jun 14, 2024
1 parent 1434c45 commit f844b33
Showing 20 changed files with 295 additions and 257 deletions.
3 changes: 0 additions & 3 deletions core/iwasm/aot/aot_runtime.c
@@ -1944,9 +1944,6 @@ aot_deinstantiate(AOTModuleInstance *module_inst, bool is_sub_inst)
#endif

if (!is_sub_inst) {
#if WASM_ENABLE_WASI_NN != 0
wasi_nn_destroy((WASMModuleInstanceCommon *)module_inst);
#endif
wasm_native_call_context_dtors((WASMModuleInstanceCommon *)module_inst);
}

4 changes: 0 additions & 4 deletions core/iwasm/aot/aot_runtime.h
@@ -14,10 +14,6 @@
#include "gc_export.h"
#endif

#if WASM_ENABLE_WASI_NN != 0
#include "../libraries/wasi-nn/src/wasi_nn_private.h"
#endif

#ifdef __cplusplus
extern "C" {
#endif
15 changes: 0 additions & 15 deletions core/iwasm/common/wasm_native.c
@@ -33,9 +33,6 @@ get_spectest_export_apis(NativeSymbol **p_libc_builtin_apis);
uint32
get_libc_wasi_export_apis(NativeSymbol **p_libc_wasi_apis);

uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_libc_wasi_apis);

uint32
get_base_lib_export_apis(NativeSymbol **p_base_lib_apis);

@@ -565,18 +562,6 @@ wasm_native_init()
goto fail;
#endif /* WASM_ENABLE_LIB_RATS */

#if WASM_ENABLE_WASI_NN != 0
n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
#define wasi_nn_module_name "wasi_ephemeral_nn"
#else /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
#define wasi_nn_module_name "wasi_nn"
#endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
if (!wasm_native_register_natives(wasi_nn_module_name, native_symbols,
n_native_symbols))
goto fail;
#endif

#if WASM_ENABLE_QUICK_AOT_ENTRY != 0
if (!quick_aot_entry_init()) {
#if WASM_ENABLE_SPEC_TEST != 0 || WASM_ENABLE_LIBC_BUILTIN != 0 \
3 changes: 0 additions & 3 deletions core/iwasm/interpreter/wasm_runtime.c
@@ -3181,9 +3181,6 @@ wasm_deinstantiate(WASMModuleInstance *module_inst, bool is_sub_inst)
wasm_runtime_free(module_inst->c_api_func_imports);

if (!is_sub_inst) {
#if WASM_ENABLE_WASI_NN != 0
wasi_nn_destroy((WASMModuleInstanceCommon *)module_inst);
#endif
wasm_native_call_context_dtors((WASMModuleInstanceCommon *)module_inst);
}

4 changes: 0 additions & 4 deletions core/iwasm/interpreter/wasm_runtime.h
@@ -13,10 +13,6 @@
#include "../common/wasm_runtime_common.h"
#include "../common/wasm_exec_env.h"

#if WASM_ENABLE_WASI_NN != 0
#include "../libraries/wasi-nn/src/wasi_nn_private.h"
#endif

#ifdef __cplusplus
extern "C" {
#endif
51 changes: 35 additions & 16 deletions core/iwasm/libraries/wasi-nn/README.md
@@ -2,15 +2,28 @@

## How to use

### Host

Enable WASI-NN in WAMR by specifying it in the CMake build configuration as follows:

```
```cmake
set (WAMR_BUILD_WASI_NN 1)
```

The definition of the functions provided by WASI-NN is in the header file `core/iwasm/libraries/wasi-nn/wasi_nn.h`.
or on the command line:

```bash
$ cmake -DWAMR_BUILD_WASI_NN=1 <other options> ...
```

> [!Caution]
> If `WAMR_BUILD_WASI_NN` is enabled, iwasm links against a shared WAMR library instead of a static one. WASI-NN backends are loaded dynamically at runtime. Users shall specify the path of the backend library and register it with the iwasm runtime via `--native-lib=<path of backend library>`. All shared libraries should be placed in a directory covered by `LD_LIBRARY_PATH`.
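
A minimal sketch of a typical invocation under this setup (the paths and the wasm file name are illustrative; use the ones from your own build):

```bash
# Make the shared WAMR runtime visible to the dynamic loader (path is illustrative).
export LD_LIBRARY_PATH=/path/to/wamr/build:$LD_LIBRARY_PATH

# Register the wasi-nn backend library and run the Wasm app.
iwasm --native-lib=/path/to/libwasi-nn-tflite.so app.wasm
```
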
### Wasm

The definition of functions provided by WASI-NN (Wasm imports) is in the header file _core/iwasm/libraries/wasi-nn/wasi_nn.h_.

By only including this file in your WASM application you will bind WASI-NN into your module.
Simply including this file in a Wasm application binds WASI-NN into the module.
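
As a rough sketch, an application loads a graph, creates an execution context, sets the input tensor, calls compute, and reads the output. The outline below assumes the function and type names from the wasi-nn specification (`load`, `init_execution_context`, `set_input`, `compute`, `get_output`); consult _wasi_nn.h_ for the exact signatures, which may differ:

```c
/* Sketch of the usual wasi-nn call sequence; the type names and signatures
   are assumptions taken from the wasi-nn spec, check wasi_nn.h before use. */
#include <stdint.h>
#include "wasi_nn.h"

int
run_inference(graph_builder_array *model, uint8_t *input, uint8_t *output,
              uint32_t output_size)
{
    graph g;
    if (load(model, tensorflowlite, cpu, &g) != success)
        return -1;

    graph_execution_context ctx;
    if (init_execution_context(g, &ctx) != success)
        return -1;

    /* example input shape: 1 x 224 x 224 x 3, fp32 */
    uint32_t dims[] = { 1, 224, 224, 3 };
    tensor_dimensions shape = { .buf = dims, .size = 4 };
    tensor t = { .dimensions = &shape, .type = fp32, .data = input };
    if (set_input(ctx, 0, &t) != success)
        return -1;

    if (compute(ctx) != success)
        return -1;

    uint32_t bytes_written = output_size;
    if (get_output(ctx, 0, output, &bytes_written) != success)
        return -1;

    return (int)bytes_written;
}
```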

## Tests

@@ -27,9 +40,8 @@ Build the runtime image for your execution target type.
- `vx-delegate`
- `tpu`

```
EXECUTION_TYPE=cpu
docker build -t wasi-nn-${EXECUTION_TYPE} -f core/iwasm/libraries/wasi-nn/test/Dockerfile.${EXECUTION_TYPE} .
```bash
EXECUTION_TYPE=cpu docker build -t wasi-nn-${EXECUTION_TYPE} -f core/iwasm/libraries/wasi-nn/test/Dockerfile.${EXECUTION_TYPE} .
```

### Build wasm app
@@ -50,56 +62,63 @@ If all the tests have run properly you will see the following message in the terminal
Tests: passed!
```

> [!TIP]
> Use _libwasi-nn-tflite.so_ as an example; substitute whichever backend library you have actually built.

- CPU

```
```bash
docker run \
-v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
-v $PWD/core/iwasm/libraries/wasi-nn/test/models:/models \
wasi-nn-cpu \
--dir=/ \
--env="TARGET=cpu" \
--native-lib=/lib/libwasi-nn-tflite.so \
/assets/test_tensorflow.wasm
```

- (NVIDIA) GPU
- Requirements:
- [NVIDIA docker](https://github.com/NVIDIA/nvidia-docker).

```
```bash
docker run \
--runtime=nvidia \
-v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
-v $PWD/core/iwasm/libraries/wasi-nn/test/models:/models \
wasi-nn-nvidia-gpu \
--dir=/ \
--env="TARGET=gpu" \
--native-lib=/lib/libwasi-nn-tflite.so \
/assets/test_tensorflow.wasm
```

- vx-delegate for NPU (x86 simulator)

```
```bash
docker run \
-v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
wasi-nn-vx-delegate \
--dir=/ \
--env="TARGET=gpu" \
--native-lib=/lib/libwasi-nn-tflite.so \
/assets/test_tensorflow_quantized.wasm
```

- (Coral) TPU
- Requirements:
- [Coral USB](https://coral.ai/products/accelerator/).

```
```bash
docker run \
--privileged \
--device=/dev/bus/usb:/dev/bus/usb \
-v $PWD/core/iwasm/libraries/wasi-nn/test:/assets \
wasi-nn-tpu \
--dir=/ \
--env="TARGET=tpu" \
--native-lib=/lib/libwasi-nn-tflite.so \
/assets/test_tensorflow_quantized.wasm
```

@@ -120,20 +139,20 @@ Use [classification-example](https://github.com/bytecodealliance/wasi-nn/tree/ma
### Prepare the model and the wasm

``` bash
```bash
$ pwd
/workspaces/wasm-micro-runtime/core/iwasm/libraries/wasi-nn/test

$ docker build -t wasi-nn-example:v1.0 -f Dockerfile.wasi-nn-example .
```

There are model files(*mobilenet\**) and wasm files(*wasi-nn-example.wasm*) in the directory */workspaces/wasi-nn/rust/examples/classification-example/build* in the image of wasi-nn-example:v1.0.
There are model files (_mobilenet*_) and wasm files (_wasi-nn-example.wasm_) in the directory _/workspaces/wasi-nn/rust/examples/classification-example/build_ of the wasi-nn-example:v1.0 image.

### Build iwasm and test

*TODO: May need alternative steps to build the iwasm and test in the container of wasi-nn-example:v1.0*
_TODO: May need alternative steps to build the iwasm and test in the container of wasi-nn-example:v1.0_

``` bash
```bash
$ pwd
/workspaces/wasm-micro-runtime

@@ -143,9 +162,9 @@ $ docker run --rm -it -v $(pwd):/workspaces/wasm-micro-runtime wasi-nn-example:v
> [!Caution]
> The following steps are executed inside the wasi-nn-example:v1.0 container.

``` bash
```bash
$ cd /workspaces/wasm-micro-runtime/product-mini/platforms/linux
$ cmake -S . -B build -DWAMR_BUILD_WASI_NN=1 -DWAMR_BUILD_WASI_EPHEMERAL_NN=1
$ cmake --build build
$ ./build/iwasm -v=5 --map-dir=/workspaces/wasi-nn/rust/examples/classification-example/build/::fixture /workspaces/wasi-nn/rust/examples/classification-example/build/wasi-nn-example.wasm
```
```
56 changes: 45 additions & 11 deletions core/iwasm/libraries/wasi-nn/cmake/wasi_nn.cmake
@@ -6,17 +6,51 @@ list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR})
# Find tensorflow-lite
find_package(tensorflow_lite REQUIRED)

set(WASI_NN_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/..)
set(WASI_NN_ROOT ${CMAKE_CURRENT_LIST_DIR}/..)

include_directories (${WASI_NN_ROOT_DIR}/include)
include_directories (${WASI_NN_ROOT_DIR}/src)
include_directories (${WASI_NN_ROOT_DIR}/src/utils)

set (
WASI_NN_SOURCES
${WASI_NN_ROOT_DIR}/src/wasi_nn.c
${WASI_NN_ROOT_DIR}/src/wasi_nn_tensorflowlite.cpp
${WASI_NN_ROOT_DIR}/src/utils/wasi_nn_app_native.c
#
# wasi-nn general
add_library(
wasi-nn-general
SHARED
${WASI_NN_ROOT}/src/wasi_nn.c
${WASI_NN_ROOT}/src/utils/wasi_nn_app_native.c
)
target_include_directories(
wasi-nn-general
PUBLIC
${WASI_NN_ROOT}/include
${WASI_NN_ROOT}/src
${WASI_NN_ROOT}/src/utils
)
target_link_libraries(
wasi-nn-general
PUBLIC
libiwasm
)
target_compile_definitions(
wasi-nn-general
PUBLIC
$<$<CONFIG:Debug>:NN_LOG_LEVEL=0>
$<$<CONFIG:Release>:NN_LOG_LEVEL=2>
)

set (WASI_NN_LIBS tensorflow-lite)
#
# wasi-nn backends
add_library(
wasi-nn-tflite
SHARED
${WASI_NN_ROOT}/src/wasi_nn_tensorflowlite.cpp
)
#target_link_options(
# wasi-nn-tflite
# PRIVATE
# -Wl,--whole-archive libwasi-nn-general.a
# -Wl,--no-whole-archive
#)
target_link_libraries(
wasi-nn-tflite
PUBLIC
tensorflow-lite
wasi-nn-general
)
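
The wasi-nn-tflite target above shows the pattern every backend is expected to follow: one SHARED library per backend, linked against both its ML framework and wasi-nn-general. A hypothetical extra backend could be added along the same lines (the source file and framework target below are placeholders, not part of this commit):

```cmake
# Hypothetical additional backend following the same pattern;
# wasi_nn_somebackend.c and some-framework are placeholders.
add_library(
  wasi-nn-somebackend
  SHARED
  ${WASI_NN_ROOT}/src/wasi_nn_somebackend.c
)
target_link_libraries(
  wasi-nn-somebackend
  PUBLIC
  some-framework
  wasi-nn-general
)
```
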
58 changes: 0 additions & 58 deletions core/iwasm/libraries/wasi-nn/external/CMakeLists.txt

This file was deleted.

13 changes: 0 additions & 13 deletions core/iwasm/libraries/wasi-nn/external/README.md

This file was deleted.


