Skip to content

Commit

Permalink
Snakemake: Add mlperf tiny benchmarks (#321)
Browse files Browse the repository at this point in the history
This puts all MLPerf Tiny benchmarks in one folder, since this is actually possible, and should make it easier to build multiple benchmarks in parallel if desired at any point.
  • Loading branch information
JosseVanDelm authored Jan 2, 2025
1 parent 1c12f05 commit 5c448e6
Show file tree
Hide file tree
Showing 11 changed files with 149 additions and 58 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-run-kernel-snake.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
cache: true
cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- name: Build and run kernels
run: pixi run snakemake -k -j `nproc` all
run: pixi run snakemake -k -p -j `nproc` all
working-directory: kernels/${{ matrix.kernel }}
strategy:
matrix:
Expand Down
23 changes: 3 additions & 20 deletions .github/workflows/build-run-mlperf-tiny.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,29 +10,12 @@ on:
jobs:
build-and-run-networks:
runs-on: ubuntu-24.04
strategy:
matrix:
model:
- name: ad01_int8
dir: kernels/mlperf_tiny_ad01
- name: pretrainedResnet_quant
dir: kernels/mlperf_tiny_ic
- name: vww_96_int8
dir: kernels/mlperf_tiny_vww
- name: kws_ref_model
dir: kernels/mlperf_tiny_kws
steps:
- uses: actions/checkout@v3
- uses: prefix-dev/setup-pixi@v0.8.1
with:
cache: true
cache-write: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
- name: snax-opt network compilation
run: |
pixi run make ${{ matrix.model.name }}.o
working-directory: ${{ matrix.model.dir }}
- name: vanilla network compilation
run: |
pixi run make ${{ matrix.model.name }}.no-snax-opt.o
working-directory: ${{ matrix.model.dir }}

- name: Build and run kernels
run: pixi run snakemake -k -p -j `nproc` all
working-directory: kernels/mlperf_tiny
76 changes: 76 additions & 0 deletions kernels/mlperf_tiny/Snakefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from util.get_model import get_model
from util.snake.configs import get_mlperf_tiny_config

# Shared build configuration (tool paths, clang flags, snax-opt pipeline)
# for all mlperf tiny benchmarks.
config = get_mlperf_tiny_config()


module default_rules:
    snakefile:
        "../../util/snake/default_rules.smk"
    config:
        config


module tf_rules:
    snakefile:
        "../../util/snake/tensorflow.smk"
    config:
        config


# Import the shared default rules, except the two that are overridden
# below with benchmark-specific variants.
use rule * from default_rules exclude compile_llvm_module, snax_opt_mlir as default_*


# TensorFlow-related rules (.tflite -> TOSA MLIR conversion).
use rule * from tf_rules as tf_*


# The four MLPerf Tiny reference networks built by this Snakefile.
nets = ["pretrainedResnet_quant", "kws_ref_model", "vww_96_int8", "ad01_int8"]
# Each network is built both through snax-opt (.o) and bypassing it (.no-snax-opt.o).
extensions = [".o", ".no-snax-opt.o"]


rule all:
    """Default target: build every network object file in both variants."""
    input:
        expand("{network}{extension}", network=nets, extension=extensions),


rule get_mlperf_tiny_model:
    """Download the pretrained .tflite model for a network via util.get_model."""
    output:
        "{network}.tflite",
    # We need to constrain this wildcard to inform snakemake of the limited
    # possibilities this function can make
    wildcard_constraints:
        network="|".join(nets),
    run:
        get_model(wildcards.network, output[0])


rule snax_opt_mlir:
    """
    Overrides the default snax_opt_mlir rule, additionally passing
    --print-op-generic so the output is printed in generic MLIR op form.
    """
    input:
        "{file}.preprocfinal.mlir",
    output:
        temp("{file}.snax-opt.mlir"),
    shell:
        "{config[snax-opt]} -p {config[snaxoptflags]} --print-op-generic -o {output} {input}"


# Override default rule, for output not to be temporary
rule compile_llvm_module:
    """Compile a network's LLVM IR (.ll12) into an object file with clang."""
    input:
        "{file}.ll12",
    output:
        "{file}.o",
    shell:
        "{config[cc]} {config[clangflags]} -x ir -c {input} -o {output}"


# Special preprocessing for anomaly detection
rule preprocess_anomaly_detection:
    """Run the dedicated preprocess-mlperftiny pass pipeline on the ad01_int8 model."""
    input:
        "ad01_int8.mlir",
    output:
        "ad01_int8.preprocfinal.mlir",
    shell:
        "{config[snax-opt]} -p preprocess-mlperftiny --print-op-generic --allow-unregistered-dialect -o {output} {input}"
11 changes: 0 additions & 11 deletions kernels/mlperf_tiny_ad01/Makefile

This file was deleted.

7 changes: 0 additions & 7 deletions kernels/mlperf_tiny_ic/Makefile

This file was deleted.

7 changes: 0 additions & 7 deletions kernels/mlperf_tiny_kws/Makefile

This file was deleted.

7 changes: 0 additions & 7 deletions kernels/mlperf_tiny_vww/Makefile

This file was deleted.

8 changes: 4 additions & 4 deletions runtime/get_model.py → util/get_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@
def get_model(model: str, output_file: Path) -> None:
base_url = "https://github.com/mlcommons/tiny/raw/v1.0/benchmark/training"
match model:
case "anomaly_detection":
case "ad01_int8":
model_url = "/anomaly_detection/trained_models/ad01_int8.tflite"
case "keyword_spotting":
case "kws_ref_model":
model_url = "/keyword_spotting/trained_models/kws_ref_model.tflite"
case "image_classification":
case "pretrainedResnet_quant":
model_url = (
"/image_classification/trained_models/" "pretrainedResnet_quant.tflite"
)
case "visual_wake_words":
case "vww_96_int8":
model_url = "/visual_wake_words/trained_models/vww_96_int8.tflite"
case _:
raise Exception("No valid model selected")
Expand Down
36 changes: 35 additions & 1 deletion util/snake/configs.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
import os

from util.snake.flags import get_default_flags
from util.snake.flags import (
get_clang_flags,
get_default_flags,
get_mlir_postproc_flags,
get_mlir_preproc_flags,
)
from util.snake.paths import get_default_paths


Expand Down Expand Up @@ -44,3 +49,32 @@ def get_snax_alu_config():
config.update(get_default_flags(snitch_sw_path))
config["vltsim"] = snax_utils_path + "/snax-alu-rtl/bin/snitch_cluster.vlt"
return config


def get_mlperf_tiny_config():
    """Assemble the Snakemake config for the mlperf tiny benchmarks.

    Combines the default tool paths with the clang flags, the snax-opt
    pass pipeline, and the MLIR pre/post-processing flags.
    """
    # Passes applied by snax-opt, in pipeline order.
    snax_opt_passes = (
        "dispatch-kernels",
        "set-memory-space",
        "set-memory-layout",
        "realize-memref-casts",
        "reuse-memref-allocs",
        "insert-sync-barrier",
        "dispatch-regions",
        "linalg-to-library-call",
        "snax-copy-to-dma",
        "memref-to-snax",
        "snax-to-func",
        "clear-memory-space",
    )
    config = dict(get_default_paths())
    config["clangflags"] = get_clang_flags()
    config["snaxoptflags"] = ",".join(snax_opt_passes)
    config["mlirpostprocflags"] = get_mlir_postproc_flags()
    config["mlirpreprocflags"] = get_mlir_preproc_flags()
    return config
14 changes: 14 additions & 0 deletions util/snake/default_rules.smk
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,20 @@ rule postprocess_mlir:
"{config[mlir-opt]} {config[mlirpostprocflags]} -o {output} {input}"


rule postprocess_no_snax_mlir:
    """
    Bypass snax-opt, and apply various postprocessing transformations to mlir files with upstream mlir.
    Goal is to lower everything to LLVM dialect after this step.
    Options controlled with `mlirpostprocflags` defined in config.
    Output is temporary; it only feeds the .no-snax-opt build chain.
    """
    input:
        "{file}.preprocfinal.mlir",
    output:
        temp("{file}.no-snax-opt.ll.mlir"),
    shell:
        "{config[mlir-opt]} {config[mlirpostprocflags]} -o {output} {input}"


rule translate_mlir:
"""
Translate MLIR LLVM dialect to actual LLVM.
Expand Down
16 changes: 16 additions & 0 deletions util/snake/tensorflow.smk
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
rule convert_tflite_to_tosa:
    """Convert a .tflite flatbuffer into TOSA-dialect MLIR bytecode."""
    input:
        "{file}.tflite",
    output:
        temp("{file}.mlir.bc"),
    shell:
        "../../runtime/tflite_to_tosa.py -c {input} -o {output} "


rule convert_mlir_bytecode_to_text:
    """Convert MLIR bytecode to textual MLIR, printed in generic op form."""
    input:
        "{file}.mlir.bc",
    output:
        temp("{file}.mlir"),
    shell:
        "{config[mlir-opt]} --mlir-print-op-generic --mlir-print-local-scope -o {output} {input}"

0 comments on commit 5c448e6

Please sign in to comment.